Merge branch 'preview' of https://github.com/makeplane/plane into feat-propel-tabs-implementation

This commit is contained in:
Jayash Tripathy
2025-09-16 18:19:10 +05:30
884 changed files with 13996 additions and 5836 deletions

View File

@@ -35,6 +35,10 @@ on:
- preview
- canary
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
TARGET_BRANCH: ${{ github.ref_name }}
ARM64_BUILD: ${{ github.event.inputs.arm64 }}
@@ -268,15 +272,14 @@ jobs:
if: ${{ needs.branch_build_setup.outputs.aio_build == 'true' }}
name: Build-Push AIO Docker Image
runs-on: ubuntu-22.04
needs: [
branch_build_setup,
branch_build_push_admin,
branch_build_push_web,
branch_build_push_space,
branch_build_push_live,
branch_build_push_api,
branch_build_push_proxy
]
needs:
- branch_build_setup
- branch_build_push_admin
- branch_build_push_web
- branch_build_push_space
- branch_build_push_live
- branch_build_push_api
- branch_build_push_proxy
steps:
- name: Checkout Files
uses: actions/checkout@v4
@@ -285,7 +288,7 @@ jobs:
id: prepare_aio_assets
run: |
cd deployments/aio/community
if [ "${{ needs.branch_build_setup.outputs.build_type }}" == "Release" ]; then
aio_version=${{ needs.branch_build_setup.outputs.release_version }}
else
@@ -324,7 +327,14 @@ jobs:
upload_build_assets:
name: Upload Build Assets
runs-on: ubuntu-22.04
needs: [branch_build_setup, branch_build_push_admin, branch_build_push_web, branch_build_push_space, branch_build_push_live, branch_build_push_api, branch_build_push_proxy]
needs:
- branch_build_setup
- branch_build_push_admin
- branch_build_push_web
- branch_build_push_space
- branch_build_push_live
- branch_build_push_api
- branch_build_push_proxy
steps:
- name: Checkout Files
uses: actions/checkout@v4
@@ -397,4 +407,3 @@ jobs:
${{ github.workspace }}/deployments/cli/community/docker-compose.yml
${{ github.workspace }}/deployments/cli/community/variables.env
${{ github.workspace }}/deployments/swarm/community/swarm.sh

View File

@@ -17,8 +17,6 @@ jobs:
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '18'
- name: Get PR Branch version
run: echo "PR_VERSION=$(node -p "require('./package.json').version")" >> $GITHUB_ENV

View File

@@ -3,11 +3,21 @@ name: Build and lint API
on:
workflow_dispatch:
pull_request:
branches: ["preview"]
types: ["opened", "synchronize", "ready_for_review", "review_requested", "reopened"]
branches:
- "preview"
types:
- "opened"
- "synchronize"
- "ready_for_review"
- "review_requested"
- "reopened"
paths:
- "apps/api/**"
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
lint-api:
name: Lint API

View File

@@ -3,21 +3,18 @@ name: Build and lint web apps
on:
workflow_dispatch:
pull_request:
branches: ["preview"]
branches:
- "preview"
types:
[
"opened",
"synchronize",
"ready_for_review",
"review_requested",
"reopened",
]
paths:
- "**.tsx?"
- "**.jsx?"
- "**.css"
- "**.json"
- "!apps/api/**"
- "opened"
- "synchronize"
- "ready_for_review"
- "review_requested"
- "reopened"
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
build-and-lint:
@@ -27,16 +24,18 @@ jobs:
if: |
github.event.pull_request.draft == false &&
github.event.pull_request.requested_reviewers != null
env:
TURBO_SCM_BASE: ${{ github.event.pull_request.base.sha }}
TURBO_SCM_HEAD: ${{ github.sha }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 2
fetch-depth: 50
filter: blob:none
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version-file: ".nvmrc"
- name: Enable Corepack and pnpm
run: corepack enable pnpm
@@ -44,11 +43,11 @@ jobs:
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Lint web apps
run: pnpm run check:lint
- name: Lint Affected
run: pnpm turbo run check:lint --affected
- name: Check format
run: pnpm run check:format
- name: Check Affected format
run: pnpm turbo run check:format --affected
- name: Build apps
run: pnpm run build
- name: Build Affected
run: pnpm turbo run build --affected

2
.gitignore vendored
View File

@@ -97,3 +97,5 @@ dev-editor
# Redis
*.rdb
*.rdb.gz
storybook-static

5
.npmrc
View File

@@ -14,13 +14,10 @@ strict-peer-dependencies=false
# Turbo occasionally performs postinstall tasks for optimal performance
# moved to pnpm-workspace.yaml: onlyBuiltDependencies (e.g., allow turbo)
public-hoist-pattern[]=eslint
public-hoist-pattern[]=*eslint*
public-hoist-pattern[]=prettier
public-hoist-pattern[]=typescript
# Enforce Node version for consistent installs
use-node-version=22.18.0
# Reproducible installs across CI and dev
prefer-frozen-lockfile=true

1
.nvmrc
View File

@@ -1 +0,0 @@
lts/jod

12
apps/admin/.eslintignore Normal file
View File

@@ -0,0 +1,12 @@
.next/*
out/*
public/*
dist/*
node_modules/*
.turbo/*
.env*
.env
.env.local
.env.development
.env.production
.env.test

View File

@@ -1,5 +1,4 @@
module.exports = {
root: true,
extends: ["@plane/eslint-config/next.js"],
parser: "@typescript-eslint/parser",
};

View File

@@ -9,7 +9,7 @@ import { useInstance } from "@/hooks/store";
// components
import { InstanceEmailForm } from "./email-config-form";
const InstanceEmailPage = observer(() => {
const InstanceEmailPage: React.FC = observer(() => {
// store
const { fetchInstanceConfigurations, formattedConfig, disableEmail } = useInstance();
@@ -29,7 +29,7 @@ const InstanceEmailPage = observer(() => {
message: "Email feature has been disabled",
type: TOAST_TYPE.SUCCESS,
});
} catch (error) {
} catch (_error) {
setToast({
title: "Error disabling email",
message: "Failed to disable email feature. Please try again.",

View File

@@ -7,7 +7,8 @@ import { ExternalLink, FileText, HelpCircle, MoveLeft } from "lucide-react";
import { Transition } from "@headlessui/react";
// plane internal packages
import { WEB_BASE_URL } from "@plane/constants";
import { DiscordIcon, GithubIcon, Tooltip } from "@plane/ui";
import { DiscordIcon, GithubIcon } from "@plane/propel/icons";
import { Tooltip } from "@plane/propel/tooltip";
import { cn } from "@plane/utils";
// hooks
import { useTheme } from "@/hooks/store";

View File

@@ -5,7 +5,8 @@ import Link from "next/link";
import { usePathname } from "next/navigation";
import { Image, BrainCog, Cog, Lock, Mail } from "lucide-react";
// plane internal packages
import { Tooltip, WorkspaceIcon } from "@plane/ui";
import { WorkspaceIcon } from "@plane/propel/icons";
import { Tooltip } from "@plane/propel/tooltip";
import { cn } from "@plane/utils";
// hooks
import { useTheme } from "@/hooks/store";

View File

@@ -1,7 +1,7 @@
"use client";
import Link from "next/link";
import { PlaneLockup } from "@plane/ui";
import { PlaneLockup } from "@plane/propel/icons";
export const AuthHeader = () => (
<div className="flex items-center justify-between gap-6 w-full flex-shrink-0 sticky top-0">

View File

@@ -25,9 +25,8 @@ export const EmailCodesConfiguration: React.FC<Props> = observer((props) => {
<ToggleSwitch
value={Boolean(parseInt(enableMagicLogin))}
onChange={() => {
Boolean(parseInt(enableMagicLogin)) === true
? updateConfig("ENABLE_MAGIC_LINK_LOGIN", "0")
: updateConfig("ENABLE_MAGIC_LINK_LOGIN", "1");
const newEnableMagicLogin = Boolean(parseInt(enableMagicLogin)) === true ? "0" : "1";
updateConfig("ENABLE_MAGIC_LINK_LOGIN", newEnableMagicLogin);
}}
size="sm"
disabled={disabled}

View File

@@ -35,9 +35,8 @@ export const GithubConfiguration: React.FC<Props> = observer((props) => {
<ToggleSwitch
value={Boolean(parseInt(enableGithubConfig))}
onChange={() => {
Boolean(parseInt(enableGithubConfig)) === true
? updateConfig("IS_GITHUB_ENABLED", "0")
: updateConfig("IS_GITHUB_ENABLED", "1");
const newEnableGithubConfig = Boolean(parseInt(enableGithubConfig)) === true ? "0" : "1";
updateConfig("IS_GITHUB_ENABLED", newEnableGithubConfig);
}}
size="sm"
disabled={disabled}

View File

@@ -35,9 +35,8 @@ export const GitlabConfiguration: React.FC<Props> = observer((props) => {
<ToggleSwitch
value={Boolean(parseInt(enableGitlabConfig))}
onChange={() => {
Boolean(parseInt(enableGitlabConfig)) === true
? updateConfig("IS_GITLAB_ENABLED", "0")
: updateConfig("IS_GITLAB_ENABLED", "1");
const newEnableGitlabConfig = Boolean(parseInt(enableGitlabConfig)) === true ? "0" : "1";
updateConfig("IS_GITLAB_ENABLED", newEnableGitlabConfig);
}}
size="sm"
disabled={disabled}

View File

@@ -35,9 +35,8 @@ export const GoogleConfiguration: React.FC<Props> = observer((props) => {
<ToggleSwitch
value={Boolean(parseInt(enableGoogleConfig))}
onChange={() => {
Boolean(parseInt(enableGoogleConfig)) === true
? updateConfig("IS_GOOGLE_ENABLED", "0")
: updateConfig("IS_GOOGLE_ENABLED", "1");
const newEnableGoogleConfig = Boolean(parseInt(enableGoogleConfig)) === true ? "0" : "1";
updateConfig("IS_GOOGLE_ENABLED", newEnableGoogleConfig);
}}
size="sm"
disabled={disabled}

View File

@@ -25,9 +25,8 @@ export const PasswordLoginConfiguration: React.FC<Props> = observer((props) => {
<ToggleSwitch
value={Boolean(parseInt(enableEmailPassword))}
onChange={() => {
Boolean(parseInt(enableEmailPassword)) === true
? updateConfig("ENABLE_EMAIL_PASSWORD", "0")
: updateConfig("ENABLE_EMAIL_PASSWORD", "1");
const newEnableEmailPassword = Boolean(parseInt(enableEmailPassword)) === true ? "0" : "1";
updateConfig("ENABLE_EMAIL_PASSWORD", newEnableEmailPassword);
}}
size="sm"
disabled={disabled}

View File

@@ -1,7 +1,7 @@
"use client";
import Link from "next/link";
import { Tooltip } from "@plane/ui";
import { Tooltip } from "@plane/propel/tooltip";
type Props = {
label?: string;

View File

@@ -2,7 +2,7 @@ import { observer } from "mobx-react";
import { ExternalLink } from "lucide-react";
// plane internal packages
import { WEB_BASE_URL } from "@plane/constants";
import { Tooltip } from "@plane/ui";
import { Tooltip } from "@plane/propel/tooltip";
import { getFileURL } from "@plane/utils";
// hooks
import { useWorkspace } from "@/hooks/store";

View File

@@ -209,7 +209,7 @@ export class InstanceStore implements IInstanceStore {
});
});
await this.instanceService.disableEmail();
} catch (error) {
} catch (_error) {
console.error("Error disabling the email");
this.instanceConfigurations = instanceConfigurations;
}

View File

@@ -1,7 +1,7 @@
{
"name": "admin",
"description": "Admin UI for Plane",
"version": "0.28.0",
"version": "1.0.0",
"license": "AGPL-3.0",
"private": true,
"scripts": {
@@ -26,30 +26,30 @@
"@plane/ui": "workspace:*",
"@plane/utils": "workspace:*",
"autoprefixer": "10.4.14",
"axios": "1.11.0",
"lodash": "^4.17.21",
"lucide-react": "^0.469.0",
"mobx": "^6.12.0",
"mobx-react": "^9.1.1",
"next": "14.2.30",
"axios": "catalog:",
"lodash": "catalog:",
"lucide-react": "catalog:",
"mobx": "catalog:",
"mobx-react": "catalog:",
"next": "catalog:",
"next-themes": "^0.2.1",
"postcss": "^8.4.49",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"react": "catalog:",
"react-dom": "catalog:",
"react-hook-form": "7.51.5",
"sharp": "^0.33.5",
"swr": "^2.2.4",
"uuid": "^9.0.1"
"sharp": "catalog:",
"swr": "catalog:",
"uuid": "catalog:"
},
"devDependencies": {
"@plane/eslint-config": "workspace:*",
"@plane/tailwind-config": "workspace:*",
"@plane/typescript-config": "workspace:*",
"@types/lodash": "^4.17.6",
"@types/lodash": "catalog:",
"@types/node": "18.16.1",
"@types/react": "^18.3.11",
"@types/react-dom": "^18.2.18",
"@types/react": "catalog:",
"@types/react-dom": "catalog:",
"@types/uuid": "^9.0.8",
"typescript": "5.8.3"
"typescript": "catalog:"
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "plane-api",
"version": "0.28.0",
"version": "1.0.0",
"license": "AGPL-3.0",
"private": true,
"description": "API server powering Plane's backend"

View File

@@ -91,6 +91,7 @@ class BaseSerializer(serializers.ModelSerializer):
"project_lead": UserLiteSerializer,
"state": StateLiteSerializer,
"created_by": UserLiteSerializer,
"updated_by": UserLiteSerializer,
"issue": IssueSerializer,
"actor": UserLiteSerializer,
"owned_by": UserLiteSerializer,

View File

@@ -24,7 +24,6 @@ from plane.db.models import (
)
from plane.utils.content_validator import (
validate_html_content,
validate_json_content,
validate_binary_data,
)
@@ -89,20 +88,24 @@ class IssueSerializer(BaseSerializer):
raise serializers.ValidationError("Invalid HTML passed")
# Validate description content for security
if data.get("description"):
is_valid, error_msg = validate_json_content(data["description"])
if not is_valid:
raise serializers.ValidationError({"description": error_msg})
if data.get("description_html"):
is_valid, error_msg = validate_html_content(data["description_html"])
is_valid, error_msg, sanitized_html = validate_html_content(
data["description_html"]
)
if not is_valid:
raise serializers.ValidationError({"description_html": error_msg})
raise serializers.ValidationError(
{"error": "html content is not valid"}
)
# Update the data with sanitized HTML if available
if sanitized_html is not None:
data["description_html"] = sanitized_html
if data.get("description_binary"):
is_valid, error_msg = validate_binary_data(data["description_binary"])
if not is_valid:
raise serializers.ValidationError({"description_binary": error_msg})
raise serializers.ValidationError(
{"description_binary": "Invalid binary data"}
)
# Validate assignees are from project
if data.get("assignees", []):

View File

@@ -12,7 +12,6 @@ from plane.db.models import (
from plane.utils.content_validator import (
validate_html_content,
validate_json_content,
)
from .base import BaseSerializer
@@ -200,27 +199,18 @@ class ProjectSerializer(BaseSerializer):
)
# Validate description content for security
if "description" in data and data["description"]:
# For Project, description might be text field, not JSON
if isinstance(data["description"], dict):
is_valid, error_msg = validate_json_content(data["description"])
if not is_valid:
raise serializers.ValidationError({"description": error_msg})
if "description_text" in data and data["description_text"]:
is_valid, error_msg = validate_json_content(data["description_text"])
if not is_valid:
raise serializers.ValidationError({"description_text": error_msg})
if "description_html" in data and data["description_html"]:
if isinstance(data["description_html"], dict):
is_valid, error_msg = validate_json_content(data["description_html"])
else:
is_valid, error_msg = validate_html_content(
is_valid, error_msg, sanitized_html = validate_html_content(
str(data["description_html"])
)
# Update the data with sanitized HTML if available
if sanitized_html is not None:
data["description_html"] = sanitized_html
if not is_valid:
raise serializers.ValidationError({"description_html": error_msg})
raise serializers.ValidationError(
{"error": "html content is not valid"}
)
return data

View File

@@ -39,13 +39,31 @@ def allow_permission(allowed_roles, level="PROJECT", creator=False, model=None):
).exists():
return view_func(instance, request, *args, **kwargs)
else:
if ProjectMember.objects.filter(
is_user_has_allowed_role = ProjectMember.objects.filter(
member=request.user,
workspace__slug=kwargs["slug"],
project_id=kwargs["project_id"],
role__in=allowed_role_values,
is_active=True,
).exists():
).exists()
# Return if the user has the allowed role else if they are workspace admin and part of the project regardless of the role
if is_user_has_allowed_role:
return view_func(instance, request, *args, **kwargs)
elif (
ProjectMember.objects.filter(
member=request.user,
workspace__slug=kwargs["slug"],
project_id=kwargs["project_id"],
is_active=True,
).exists()
and WorkspaceMember.objects.filter(
member=request.user,
workspace__slug=kwargs["slug"],
role=ROLE.ADMIN.value,
is_active=True,
).exists()
):
return view_func(instance, request, *args, **kwargs)
# Return permission denied if no conditions are met

View File

@@ -3,11 +3,7 @@ from rest_framework.permissions import SAFE_METHODS, BasePermission
# Module import
from plane.db.models import ProjectMember, WorkspaceMember
# Permission Mappings
Admin = 20
Member = 15
Guest = 5
from plane.db.models.project import ROLE
class ProjectBasePermission(BasePermission):
@@ -26,18 +22,31 @@ class ProjectBasePermission(BasePermission):
return WorkspaceMember.objects.filter(
workspace__slug=view.workspace_slug,
member=request.user,
role__in=[Admin, Member],
role__in=[ROLE.ADMIN.value, ROLE.MEMBER.value],
is_active=True,
).exists()
## Only Project Admins can update project attributes
return ProjectMember.objects.filter(
project_member_qs = ProjectMember.objects.filter(
workspace__slug=view.workspace_slug,
member=request.user,
role=Admin,
project_id=view.project_id,
is_active=True,
).exists()
)
## Only project admins or workspace admin who is part of the project can access
if project_member_qs.filter(role=ROLE.ADMIN.value).exists():
return True
else:
return (
project_member_qs.exists()
and WorkspaceMember.objects.filter(
member=request.user,
workspace__slug=view.workspace_slug,
role=ROLE.ADMIN.value,
is_active=True,
).exists()
)
class ProjectMemberPermission(BasePermission):
@@ -55,7 +64,7 @@ class ProjectMemberPermission(BasePermission):
return WorkspaceMember.objects.filter(
workspace__slug=view.workspace_slug,
member=request.user,
role__in=[Admin, Member],
role__in=[ROLE.ADMIN.value, ROLE.MEMBER.value],
is_active=True,
).exists()
@@ -63,7 +72,7 @@ class ProjectMemberPermission(BasePermission):
return ProjectMember.objects.filter(
workspace__slug=view.workspace_slug,
member=request.user,
role__in=[Admin, Member],
role__in=[ROLE.ADMIN.value, ROLE.MEMBER.value],
project_id=view.project_id,
is_active=True,
).exists()
@@ -97,7 +106,7 @@ class ProjectEntityPermission(BasePermission):
return ProjectMember.objects.filter(
workspace__slug=view.workspace_slug,
member=request.user,
role__in=[Admin, Member],
role__in=[ROLE.ADMIN.value, ROLE.MEMBER.value],
project_id=view.project_id,
is_active=True,
).exists()

View File

@@ -23,7 +23,6 @@ from plane.db.models import (
)
from plane.utils.content_validator import (
validate_html_content,
validate_json_content,
validate_binary_data,
)
from plane.app.permissions import ROLE
@@ -76,20 +75,24 @@ class DraftIssueCreateSerializer(BaseSerializer):
raise serializers.ValidationError("Start date cannot exceed target date")
# Validate description content for security
if "description" in attrs and attrs["description"]:
is_valid, error_msg = validate_json_content(attrs["description"])
if not is_valid:
raise serializers.ValidationError({"description": error_msg})
if "description_html" in attrs and attrs["description_html"]:
is_valid, error_msg = validate_html_content(attrs["description_html"])
is_valid, error_msg, sanitized_html = validate_html_content(
attrs["description_html"]
)
if not is_valid:
raise serializers.ValidationError({"description_html": error_msg})
raise serializers.ValidationError(
{"error": "html content is not valid"}
)
# Update the attrs with sanitized HTML if available
if sanitized_html is not None:
attrs["description_html"] = sanitized_html
if "description_binary" in attrs and attrs["description_binary"]:
is_valid, error_msg = validate_binary_data(attrs["description_binary"])
if not is_valid:
raise serializers.ValidationError({"description_binary": error_msg})
raise serializers.ValidationError(
{"description_binary": "Invalid binary data"}
)
# Validate assignees are from project
if attrs.get("assignee_ids", []):

View File

@@ -43,7 +43,6 @@ from plane.db.models import (
)
from plane.utils.content_validator import (
validate_html_content,
validate_json_content,
validate_binary_data,
)
@@ -128,20 +127,24 @@ class IssueCreateSerializer(BaseSerializer):
raise serializers.ValidationError("Start date cannot exceed target date")
# Validate description content for security
if "description" in attrs and attrs["description"]:
is_valid, error_msg = validate_json_content(attrs["description"])
if not is_valid:
raise serializers.ValidationError({"description": error_msg})
if "description_html" in attrs and attrs["description_html"]:
is_valid, error_msg = validate_html_content(attrs["description_html"])
is_valid, error_msg, sanitized_html = validate_html_content(
attrs["description_html"]
)
if not is_valid:
raise serializers.ValidationError({"description_html": error_msg})
raise serializers.ValidationError(
{"error": "html content is not valid"}
)
# Update the attrs with sanitized HTML if available
if sanitized_html is not None:
attrs["description_html"] = sanitized_html
if "description_binary" in attrs and attrs["description_binary"]:
is_valid, error_msg = validate_binary_data(attrs["description_binary"])
if not is_valid:
raise serializers.ValidationError({"description_binary": error_msg})
raise serializers.ValidationError(
{"description_binary": "Invalid binary data"}
)
# Validate assignees are from project
if attrs.get("assignee_ids", []):
@@ -664,16 +667,33 @@ class IssueReactionSerializer(BaseSerializer):
class IssueReactionLiteSerializer(DynamicBaseSerializer):
display_name = serializers.CharField(source="actor.display_name", read_only=True)
class Meta:
model = IssueReaction
fields = ["id", "actor", "issue", "reaction"]
fields = ["id", "actor", "issue", "reaction", "display_name"]
class CommentReactionSerializer(BaseSerializer):
display_name = serializers.CharField(source="actor.display_name", read_only=True)
class Meta:
model = CommentReaction
fields = "__all__"
read_only_fields = ["workspace", "project", "comment", "actor", "deleted_at"]
fields = [
"id",
"actor",
"comment",
"reaction",
"display_name",
"deleted_at",
"workspace",
"project",
"created_at",
"updated_at",
"created_by",
"updated_by",
]
read_only_fields = ["workspace", "project", "comment", "actor", "deleted_at", "created_by", "updated_by"]
class IssueVoteSerializer(BaseSerializer):
@@ -908,9 +928,14 @@ class IssueLiteSerializer(DynamicBaseSerializer):
class IssueDetailSerializer(IssueSerializer):
description_html = serializers.CharField()
is_subscribed = serializers.BooleanField(read_only=True)
is_intake = serializers.BooleanField(read_only=True)
class Meta(IssueSerializer.Meta):
fields = IssueSerializer.Meta.fields + ["description_html", "is_subscribed"]
fields = IssueSerializer.Meta.fields + [
"description_html",
"is_subscribed",
"is_intake",
]
read_only_fields = fields

View File

@@ -7,7 +7,6 @@ from .base import BaseSerializer
from plane.utils.content_validator import (
validate_binary_data,
validate_html_content,
validate_json_content,
)
from plane.db.models import (
Page,
@@ -229,23 +228,13 @@ class PageBinaryUpdateSerializer(serializers.Serializer):
return value
# Use the validation function from utils
is_valid, error_message = validate_html_content(value)
is_valid, error_message, sanitized_html = validate_html_content(value)
if not is_valid:
raise serializers.ValidationError(error_message)
return value
# Return sanitized HTML if available, otherwise return original
return sanitized_html if sanitized_html is not None else value
def validate_description(self, value):
"""Validate the JSON description"""
if not value:
return value
# Use the validation function from utils
is_valid, error_message = validate_json_content(value)
if not is_valid:
raise serializers.ValidationError(error_message)
return value
def update(self, instance, validated_data):
"""Update the page instance with validated data"""

View File

@@ -15,8 +15,6 @@ from plane.db.models import (
)
from plane.utils.content_validator import (
validate_html_content,
validate_json_content,
validate_binary_data,
)
@@ -65,27 +63,18 @@ class ProjectSerializer(BaseSerializer):
def validate(self, data):
# Validate description content for security
if "description" in data and data["description"]:
# For Project, description might be text field, not JSON
if isinstance(data["description"], dict):
is_valid, error_msg = validate_json_content(data["description"])
if not is_valid:
raise serializers.ValidationError({"description": error_msg})
if "description_text" in data and data["description_text"]:
is_valid, error_msg = validate_json_content(data["description_text"])
if not is_valid:
raise serializers.ValidationError({"description_text": error_msg})
if "description_html" in data and data["description_html"]:
if isinstance(data["description_html"], dict):
is_valid, error_msg = validate_json_content(data["description_html"])
else:
is_valid, error_msg = validate_html_content(
str(data["description_html"])
)
is_valid, error_msg, sanitized_html = validate_html_content(
str(data["description_html"])
)
# Update the data with sanitized HTML if available
if sanitized_html is not None:
data["description_html"] = sanitized_html
if not is_valid:
raise serializers.ValidationError({"description_html": error_msg})
raise serializers.ValidationError(
{"error": "html content is not valid"}
)
return data

View File

@@ -26,7 +26,6 @@ from plane.utils.constants import RESTRICTED_WORKSPACE_SLUGS
from plane.utils.url import contains_url
from plane.utils.content_validator import (
validate_html_content,
validate_json_content,
validate_binary_data,
)
@@ -319,20 +318,24 @@ class StickySerializer(BaseSerializer):
def validate(self, data):
# Validate description content for security
if "description" in data and data["description"]:
is_valid, error_msg = validate_json_content(data["description"])
if not is_valid:
raise serializers.ValidationError({"description": error_msg})
if "description_html" in data and data["description_html"]:
is_valid, error_msg = validate_html_content(data["description_html"])
is_valid, error_msg, sanitized_html = validate_html_content(
data["description_html"]
)
if not is_valid:
raise serializers.ValidationError({"description_html": error_msg})
raise serializers.ValidationError(
{"error": "html content is not valid"}
)
# Update the data with sanitized HTML if available
if sanitized_html is not None:
data["description_html"] = sanitized_html
if "description_binary" in data and data["description_binary"]:
is_valid, error_msg = validate_binary_data(data["description_binary"])
if not is_valid:
raise serializers.ValidationError({"description_binary": error_msg})
raise serializers.ValidationError(
{"description_binary": "Invalid binary data"}
)
return data

View File

@@ -441,7 +441,11 @@ class WorkspaceFileAssetEndpoint(BaseAPIView):
# Get the presigned URL
storage = S3Storage(request=request)
# Generate a presigned URL to share an S3 object
signed_url = storage.generate_presigned_url(object_name=asset.asset.name)
signed_url = storage.generate_presigned_url(
object_name=asset.asset.name,
disposition="attachment",
filename=asset.attributes.get("name"),
)
# Redirect to the signed URL
return HttpResponseRedirect(signed_url)
@@ -641,7 +645,11 @@ class ProjectAssetEndpoint(BaseAPIView):
# Get the presigned URL
storage = S3Storage(request=request)
# Generate a presigned URL to share an S3 object
signed_url = storage.generate_presigned_url(object_name=asset.asset.name)
signed_url = storage.generate_presigned_url(
object_name=asset.asset.name,
disposition="attachment",
filename=asset.attributes.get("name"),
)
# Redirect to the signed URL
return HttpResponseRedirect(signed_url)

View File

@@ -51,6 +51,7 @@ from plane.db.models import (
IssueRelation,
IssueAssignee,
IssueLabel,
IntakeIssue,
)
from plane.utils.grouper import (
issue_group_values,
@@ -1223,7 +1224,7 @@ class IssueDetailIdentifierEndpoint(BaseAPIView):
# Fetch the issue
issue = (
Issue.issue_objects.filter(project_id=project.id)
Issue.objects.filter(project_id=project.id)
.filter(workspace__slug=slug)
.select_related("workspace", "project", "state", "parent")
.prefetch_related("assignees", "labels", "issue_module__module")
@@ -1315,6 +1316,16 @@ class IssueDetailIdentifierEndpoint(BaseAPIView):
)
)
)
.annotate(
is_intake=Exists(
IntakeIssue.objects.filter(
issue=OuterRef("id"),
status__in=[-2, 0],
workspace__slug=slug,
project_id=project.id,
)
)
)
).first()
# Check if the issue exists

View File

@@ -198,6 +198,7 @@ class PageViewSet(BaseViewSet):
def retrieve(self, request, slug, project_id, pk=None):
page = self.get_queryset().filter(pk=pk).first()
project = Project.objects.get(pk=project_id)
track_visit = request.query_params.get("track_visit", "true").lower() == "true"
"""
if the role is guest and guest_view_all_features is false and owned by is not
@@ -230,13 +231,14 @@ class PageViewSet(BaseViewSet):
).values_list("entity_identifier", flat=True)
data = PageDetailSerializer(page).data
data["issue_ids"] = issue_ids
recent_visited_task.delay(
slug=slug,
entity_name="page",
entity_identifier=pk,
user_id=request.user.id,
project_id=project_id,
)
if track_visit:
recent_visited_task.delay(
slug=slug,
entity_name="page",
entity_identifier=pk,
user_id=request.user.id,
project_id=project_id,
)
return Response(data, status=status.HTTP_200_OK)
@allow_permission([ROLE.ADMIN], model=Page, creator=True)

View File

@@ -5,13 +5,12 @@ from django.utils import timezone
import json
# Django imports
from django.db import IntegrityError
from django.db.models import Exists, F, OuterRef, Prefetch, Q, Subquery
from django.core.serializers.json import DjangoJSONEncoder
# Third Party imports
from rest_framework.response import Response
from rest_framework import serializers, status
from rest_framework import status
from rest_framework.permissions import AllowAny
# Module imports
@@ -106,7 +105,10 @@ class ProjectViewSet(BaseViewSet):
fields = [field for field in request.GET.get("fields", "").split(",") if field]
projects = self.get_queryset().order_by("sort_order", "name")
if WorkspaceMember.objects.filter(
member=request.user, workspace__slug=slug, is_active=True, role=5
member=request.user,
workspace__slug=slug,
is_active=True,
role=ROLE.GUEST.value,
).exists():
projects = projects.filter(
project_projectmember__member=self.request.user,
@@ -114,7 +116,10 @@ class ProjectViewSet(BaseViewSet):
)
if WorkspaceMember.objects.filter(
member=request.user, workspace__slug=slug, is_active=True, role=15
member=request.user,
workspace__slug=slug,
is_active=True,
role=ROLE.MEMBER.value,
).exists():
projects = projects.filter(
Q(
@@ -189,7 +194,10 @@ class ProjectViewSet(BaseViewSet):
)
if WorkspaceMember.objects.filter(
member=request.user, workspace__slug=slug, is_active=True, role=5
member=request.user,
workspace__slug=slug,
is_active=True,
role=ROLE.GUEST.value,
).exists():
projects = projects.filter(
project_projectmember__member=self.request.user,
@@ -197,7 +205,10 @@ class ProjectViewSet(BaseViewSet):
)
if WorkspaceMember.objects.filter(
member=request.user, workspace__slug=slug, is_active=True, role=15
member=request.user,
workspace__slug=slug,
is_active=True,
role=ROLE.MEMBER.value,
).exists():
projects = projects.filter(
Q(
@@ -250,7 +261,9 @@ class ProjectViewSet(BaseViewSet):
# Add the user as Administrator to the project
_ = ProjectMember.objects.create(
project_id=serializer.data["id"], member=request.user, role=20
project_id=serializer.data["id"],
member=request.user,
role=ROLE.ADMIN.value,
)
# Also create the issue property for the user
_ = IssueUserProperty.objects.create(
@@ -263,7 +276,7 @@ class ProjectViewSet(BaseViewSet):
ProjectMember.objects.create(
project_id=serializer.data["id"],
member_id=serializer.data["project_lead"],
role=20,
role=ROLE.ADMIN.value,
)
# Also create the issue property for the user
IssueUserProperty.objects.create(
@@ -341,13 +354,23 @@ class ProjectViewSet(BaseViewSet):
def partial_update(self, request, slug, pk=None):
# try:
if not ProjectMember.objects.filter(
is_workspace_admin = WorkspaceMember.objects.filter(
member=request.user,
workspace__slug=slug,
is_active=True,
role=ROLE.ADMIN.value,
).exists()
is_project_admin = ProjectMember.objects.filter(
member=request.user,
workspace__slug=slug,
project_id=pk,
role=20,
role=ROLE.ADMIN.value,
is_active=True,
).exists():
).exists()
# Return error for if the user is neither workspace admin nor project admin
if not is_project_admin and not is_workspace_admin:
return Response(
{"error": "You don't have the required permissions."},
status=status.HTTP_403_FORBIDDEN,
@@ -402,13 +425,16 @@ class ProjectViewSet(BaseViewSet):
def destroy(self, request, slug, pk):
if (
WorkspaceMember.objects.filter(
member=request.user, workspace__slug=slug, is_active=True, role=20
member=request.user,
workspace__slug=slug,
is_active=True,
role=ROLE.ADMIN.value,
).exists()
or ProjectMember.objects.filter(
member=request.user,
workspace__slug=slug,
project_id=pk,
role=20,
role=ROLE.ADMIN.value,
is_active=True,
).exists()
):

View File

@@ -59,9 +59,10 @@ class IssueSearchEndpoint(BaseAPIView):
)
related_issue_ids = [item for sublist in related_issue_ids for item in sublist]
related_issue_ids.append(issue_id)
if issue:
issues = issues.filter(~Q(pk=issue_id), ~Q(pk__in=related_issue_ids))
issues = issues.exclude(pk__in=related_issue_ids)
return issues

View File

@@ -172,12 +172,14 @@ class WorkspaceDraftIssueViewSet(BaseViewSet):
{"error": "Issue not found"}, status=status.HTTP_404_NOT_FOUND
)
project_id = request.data.get("project_id", issue.project_id)
serializer = DraftIssueCreateSerializer(
issue,
data=request.data,
partial=True,
context={
"project_id": request.data.get("project_id", None),
"project_id": project_id,
"cycle_id": request.data.get("cycle_id", "not_provided"),
},
)

View File

@@ -7,7 +7,6 @@ from plane.app.serializers import StateSerializer
from plane.app.views.base import BaseAPIView
from plane.db.models import State
from plane.app.permissions import WorkspaceEntityPermission
from plane.utils.cache import cache_response
from collections import defaultdict
@@ -15,7 +14,6 @@ class WorkspaceStatesEndpoint(BaseAPIView):
permission_classes = [WorkspaceEntityPermission]
use_read_replica = True
@cache_response(60 * 60 * 2)
def get(self, request, slug):
states = State.objects.filter(
workspace__slug=slug,

View File

@@ -1,6 +1,3 @@
# Python imports
from urllib.parse import urlencode, urljoin
# Django imports
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
@@ -19,7 +16,7 @@ from plane.authentication.adapter.error import (
AuthenticationException,
AUTHENTICATION_ERROR_CODES,
)
from plane.utils.path_validator import validate_next_path
from plane.utils.path_validator import get_safe_redirect_url
class SignInAuthEndpoint(View):
@@ -34,11 +31,11 @@ class SignInAuthEndpoint(View):
error_message="INSTANCE_NOT_CONFIGURED",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
# Base URL join
url = urljoin(
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params,
)
return HttpResponseRedirect(url)
@@ -58,10 +55,10 @@ class SignInAuthEndpoint(View):
)
params = exc.get_error_dict()
# Next path
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params,
)
return HttpResponseRedirect(url)
@@ -76,10 +73,10 @@ class SignInAuthEndpoint(View):
payload={"email": str(email)},
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params,
)
return HttpResponseRedirect(url)
@@ -92,10 +89,10 @@ class SignInAuthEndpoint(View):
payload={"email": str(email)},
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params,
)
return HttpResponseRedirect(url)
@@ -112,19 +109,23 @@ class SignInAuthEndpoint(View):
user_login(request=request, user=user, is_app=True)
# Get the redirection path
if next_path:
path = str(validate_next_path(next_path))
path = next_path
else:
path = get_redirection_path(user=user)
# redirect to referer path
url = urljoin(base_host(request=request, is_app=True), path)
# Get the safe redirect URL
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=path,
params={},
)
return HttpResponseRedirect(url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params,
)
return HttpResponseRedirect(url)
@@ -141,10 +142,10 @@ class SignUpAuthEndpoint(View):
error_message="INSTANCE_NOT_CONFIGURED",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params,
)
return HttpResponseRedirect(url)
@@ -161,10 +162,10 @@ class SignUpAuthEndpoint(View):
payload={"email": str(email)},
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params,
)
return HttpResponseRedirect(url)
# Validate the email
@@ -179,10 +180,10 @@ class SignUpAuthEndpoint(View):
payload={"email": str(email)},
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params,
)
return HttpResponseRedirect(url)
@@ -197,10 +198,10 @@ class SignUpAuthEndpoint(View):
payload={"email": str(email)},
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params,
)
return HttpResponseRedirect(url)
@@ -217,17 +218,21 @@ class SignUpAuthEndpoint(View):
user_login(request=request, user=user, is_app=True)
# Get the redirection path
if next_path:
path = str(validate_next_path(next_path))
path = next_path
else:
path = get_redirection_path(user=user)
# redirect to referer path
url = urljoin(base_host(request=request, is_app=True), path)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=path,
params={},
)
return HttpResponseRedirect(url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params,
)
return HttpResponseRedirect(url)

View File

@@ -1,5 +1,5 @@
# Python imports
import uuid
from urllib.parse import urlencode, urljoin
# Django import
from django.http import HttpResponseRedirect
@@ -16,8 +16,7 @@ from plane.authentication.adapter.error import (
AuthenticationException,
AUTHENTICATION_ERROR_CODES,
)
from plane.utils.path_validator import validate_next_path
from plane.utils.path_validator import get_safe_redirect_url
class GitHubOauthInitiateEndpoint(View):
def get(self, request):
@@ -35,10 +34,10 @@ class GitHubOauthInitiateEndpoint(View):
error_message="INSTANCE_NOT_CONFIGURED",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
try:
@@ -49,10 +48,10 @@ class GitHubOauthInitiateEndpoint(View):
return HttpResponseRedirect(auth_url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
@@ -61,7 +60,6 @@ class GitHubCallbackEndpoint(View):
def get(self, request):
code = request.GET.get("code")
state = request.GET.get("state")
base_host = request.session.get("host")
next_path = request.session.get("next_path")
if state != request.session.get("state", ""):
@@ -70,9 +68,11 @@ class GitHubCallbackEndpoint(View):
error_message="GITHUB_OAUTH_PROVIDER_ERROR",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(base_host, "?" + urlencode(params))
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
if not code:
@@ -81,9 +81,11 @@ class GitHubCallbackEndpoint(View):
error_message="GITHUB_OAUTH_PROVIDER_ERROR",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(base_host, "?" + urlencode(params))
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
try:
@@ -93,17 +95,23 @@ class GitHubCallbackEndpoint(View):
user = provider.authenticate()
# Login the user and record his device info
user_login(request=request, user=user, is_app=True)
# Get the redirection path
if next_path:
path = str(validate_next_path(next_path))
path = next_path
else:
path = get_redirection_path(user=user)
# redirect to referer path
url = urljoin(base_host, path)
# Get the safe redirect URL
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=path,
params={}
)
return HttpResponseRedirect(url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(base_host, "?" + urlencode(params))
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)

View File

@@ -1,5 +1,5 @@
# Python imports
import uuid
from urllib.parse import urlencode, urljoin
# Django import
from django.http import HttpResponseRedirect
@@ -16,7 +16,7 @@ from plane.authentication.adapter.error import (
AuthenticationException,
AUTHENTICATION_ERROR_CODES,
)
from plane.utils.path_validator import validate_next_path
from plane.utils.path_validator import get_safe_redirect_url
class GitLabOauthInitiateEndpoint(View):
@@ -25,7 +25,7 @@ class GitLabOauthInitiateEndpoint(View):
request.session["host"] = base_host(request=request, is_app=True)
next_path = request.GET.get("next_path")
if next_path:
request.session["next_path"] = str(validate_next_path(next_path))
request.session["next_path"] = str(next_path)
# Check instance configuration
instance = Instance.objects.first()
@@ -35,10 +35,10 @@ class GitLabOauthInitiateEndpoint(View):
error_message="INSTANCE_NOT_CONFIGURED",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
try:
@@ -49,10 +49,10 @@ class GitLabOauthInitiateEndpoint(View):
return HttpResponseRedirect(auth_url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
@@ -61,7 +61,6 @@ class GitLabCallbackEndpoint(View):
def get(self, request):
code = request.GET.get("code")
state = request.GET.get("state")
base_host = request.session.get("host")
next_path = request.session.get("next_path")
if state != request.session.get("state", ""):
@@ -70,9 +69,11 @@ class GitLabCallbackEndpoint(View):
error_message="GITLAB_OAUTH_PROVIDER_ERROR",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
url = urljoin(base_host, "?" + urlencode(params))
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
if not code:
@@ -81,9 +82,11 @@ class GitLabCallbackEndpoint(View):
error_message="GITLAB_OAUTH_PROVIDER_ERROR",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(base_host, "?" + urlencode(params))
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
try:
@@ -94,16 +97,23 @@ class GitLabCallbackEndpoint(View):
# Login the user and record his device info
user_login(request=request, user=user, is_app=True)
# Get the redirection path
if next_path:
path = str(validate_next_path(next_path))
path = next_path
else:
path = get_redirection_path(user=user)
# redirect to referer path
url = urljoin(base_host, path)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=path,
params={}
)
return HttpResponseRedirect(url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(base_host, "?" + urlencode(params))
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)

View File

@@ -1,6 +1,5 @@
# Python imports
import uuid
from urllib.parse import urlencode, urljoin
# Django import
from django.http import HttpResponseRedirect
@@ -18,7 +17,7 @@ from plane.authentication.adapter.error import (
AuthenticationException,
AUTHENTICATION_ERROR_CODES,
)
from plane.utils.path_validator import validate_next_path
from plane.utils.path_validator import get_safe_redirect_url
class GoogleOauthInitiateEndpoint(View):
@@ -36,10 +35,10 @@ class GoogleOauthInitiateEndpoint(View):
error_message="INSTANCE_NOT_CONFIGURED",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
@@ -51,10 +50,10 @@ class GoogleOauthInitiateEndpoint(View):
return HttpResponseRedirect(auth_url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
@@ -63,7 +62,6 @@ class GoogleCallbackEndpoint(View):
def get(self, request):
code = request.GET.get("code")
state = request.GET.get("state")
base_host = request.session.get("host")
next_path = request.session.get("next_path")
if state != request.session.get("state", ""):
@@ -72,9 +70,11 @@ class GoogleCallbackEndpoint(View):
error_message="GOOGLE_OAUTH_PROVIDER_ERROR",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(base_host, "?" + urlencode(params))
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
if not code:
exc = AuthenticationException(
@@ -82,9 +82,11 @@ class GoogleCallbackEndpoint(View):
error_message="GOOGLE_OAUTH_PROVIDER_ERROR",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(base_host, "?" + urlencode(params))
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
try:
provider = GoogleOAuthProvider(
@@ -94,15 +96,21 @@ class GoogleCallbackEndpoint(View):
# Login the user and record his device info
user_login(request=request, user=user, is_app=True)
# Get the redirection path
path = get_redirection_path(user=user)
# redirect to referer path
url = urljoin(
base_host, str(validate_next_path(next_path)) if next_path else path
if next_path:
path = next_path
else:
path = get_redirection_path(user=user)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=path,
params={}
)
return HttpResponseRedirect(url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(base_host, "?" + urlencode(params))
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)

View File

@@ -1,6 +1,3 @@
# Python imports
from urllib.parse import urlencode, urljoin
# Django imports
from django.core.validators import validate_email
from django.http import HttpResponseRedirect
@@ -26,7 +23,7 @@ from plane.authentication.adapter.error import (
AUTHENTICATION_ERROR_CODES,
)
from plane.authentication.rate_limit import AuthenticationThrottle
from plane.utils.path_validator import validate_next_path
from plane.utils.path_validator import get_safe_redirect_url
class MagicGenerateEndpoint(APIView):
@@ -72,10 +69,10 @@ class MagicSignInEndpoint(View):
error_message="MAGIC_SIGN_IN_EMAIL_CODE_REQUIRED",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params,
)
return HttpResponseRedirect(url)
@@ -88,10 +85,10 @@ class MagicSignInEndpoint(View):
error_message="USER_DOES_NOT_EXIST",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params,
)
return HttpResponseRedirect(url)
@@ -107,7 +104,8 @@ class MagicSignInEndpoint(View):
# Login the user and record his device info
user_login(request=request, user=user, is_app=True)
if user.is_password_autoset and profile.is_onboarded:
path = "accounts/set-password"
# Redirect to the home page
path = "/"
else:
# Get the redirection path
path = (
@@ -116,15 +114,19 @@ class MagicSignInEndpoint(View):
else str(get_redirection_path(user=user))
)
# redirect to referer path
url = urljoin(base_host(request=request, is_app=True), path)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=path,
params={},
)
return HttpResponseRedirect(url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params,
)
return HttpResponseRedirect(url)
@@ -144,10 +146,10 @@ class MagicSignUpEndpoint(View):
error_message="MAGIC_SIGN_UP_EMAIL_CODE_REQUIRED",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params,
)
return HttpResponseRedirect(url)
# Existing user
@@ -158,10 +160,10 @@ class MagicSignUpEndpoint(View):
error_message="USER_ALREADY_EXIST",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params,
)
return HttpResponseRedirect(url)
@@ -177,18 +179,22 @@ class MagicSignUpEndpoint(View):
user_login(request=request, user=user, is_app=True)
# Get the redirection path
if next_path:
path = str(validate_next_path(next_path))
path = next_path
else:
path = get_redirection_path(user=user)
# redirect to referer path
url = urljoin(base_host(request=request, is_app=True), path)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=path,
params={},
)
return HttpResponseRedirect(url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
url = get_safe_redirect_url(
base_url=base_host(request=request, is_app=True),
next_path=next_path,
params=params,
)
return HttpResponseRedirect(url)

View File

@@ -1,6 +1,3 @@
# Python imports
from urllib.parse import urlencode
# Django imports
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
@@ -17,7 +14,7 @@ from plane.authentication.adapter.error import (
AUTHENTICATION_ERROR_CODES,
AuthenticationException,
)
from plane.utils.path_validator import validate_next_path
from plane.utils.path_validator import get_safe_redirect_url
class SignInAuthSpaceEndpoint(View):
@@ -32,9 +29,11 @@ class SignInAuthSpaceEndpoint(View):
error_message="INSTANCE_NOT_CONFIGURED",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
# set the referer as session to redirect after login
@@ -51,9 +50,11 @@ class SignInAuthSpaceEndpoint(View):
payload={"email": str(email)},
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
# Validate email
@@ -67,9 +68,11 @@ class SignInAuthSpaceEndpoint(View):
payload={"email": str(email)},
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
# Existing User
@@ -82,9 +85,11 @@ class SignInAuthSpaceEndpoint(View):
payload={"email": str(email)},
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
try:
@@ -95,13 +100,19 @@ class SignInAuthSpaceEndpoint(View):
# Login the user and record his device info
user_login(request=request, user=user, is_space=True)
# redirect to next path
url = f"{base_host(request=request, is_space=True)}{str(next_path) if next_path else ''}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params={}
)
return HttpResponseRedirect(url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
@@ -117,9 +128,11 @@ class SignUpAuthSpaceEndpoint(View):
error_message="INSTANCE_NOT_CONFIGURED",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
email = request.POST.get("email", False)
@@ -135,9 +148,11 @@ class SignUpAuthSpaceEndpoint(View):
payload={"email": str(email)},
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
# Validate the email
email = email.strip().lower()
@@ -151,9 +166,11 @@ class SignUpAuthSpaceEndpoint(View):
payload={"email": str(email)},
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
# Existing User
@@ -166,9 +183,11 @@ class SignUpAuthSpaceEndpoint(View):
payload={"email": str(email)},
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
try:
@@ -179,11 +198,17 @@ class SignUpAuthSpaceEndpoint(View):
# Login the user and record his device info
user_login(request=request, user=user, is_space=True)
# redirect to referer path
url = f"{base_host(request=request, is_space=True)}{str(next_path) if next_path else ''}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params={}
)
return HttpResponseRedirect(url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)

View File

@@ -1,6 +1,5 @@
# Python imports
import uuid
from urllib.parse import urlencode
# Django import
from django.http import HttpResponseRedirect
@@ -15,7 +14,7 @@ from plane.authentication.adapter.error import (
AUTHENTICATION_ERROR_CODES,
AuthenticationException,
)
from plane.utils.path_validator import validate_next_path
from plane.utils.path_validator import get_safe_redirect_url
class GitHubOauthInitiateSpaceEndpoint(View):
@@ -23,9 +22,6 @@ class GitHubOauthInitiateSpaceEndpoint(View):
# Get host and next path
request.session["host"] = base_host(request=request, is_space=True)
next_path = request.GET.get("next_path")
if next_path:
request.session["next_path"] = str(next_path)
# Check instance configuration
instance = Instance.objects.first()
if instance is None or not instance.is_setup_done:
@@ -34,9 +30,11 @@ class GitHubOauthInitiateSpaceEndpoint(View):
error_message="INSTANCE_NOT_CONFIGURED",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
try:
@@ -47,9 +45,11 @@ class GitHubOauthInitiateSpaceEndpoint(View):
return HttpResponseRedirect(auth_url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
@@ -66,9 +66,11 @@ class GitHubCallbackSpaceEndpoint(View):
error_message="GITHUB_OAUTH_PROVIDER_ERROR",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
if not code:
@@ -77,9 +79,11 @@ class GitHubCallbackSpaceEndpoint(View):
error_message="GITHUB_OAUTH_PROVIDER_ERROR",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
try:
@@ -89,11 +93,17 @@ class GitHubCallbackSpaceEndpoint(View):
user_login(request=request, user=user, is_space=True)
# Process workspace and project invitations
# redirect to referer path
url = f"{base_host(request=request, is_space=True)}{str(next_path) if next_path else ''}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)

View File

@@ -1,6 +1,5 @@
# Python imports
import uuid
from urllib.parse import urlencode
# Django import
from django.http import HttpResponseRedirect
@@ -15,7 +14,7 @@ from plane.authentication.adapter.error import (
AUTHENTICATION_ERROR_CODES,
AuthenticationException,
)
from plane.utils.path_validator import validate_next_path
from plane.utils.path_validator import get_safe_redirect_url
class GitLabOauthInitiateSpaceEndpoint(View):
@@ -23,8 +22,6 @@ class GitLabOauthInitiateSpaceEndpoint(View):
# Get host and next path
request.session["host"] = base_host(request=request, is_space=True)
next_path = request.GET.get("next_path")
if next_path:
request.session["next_path"] = str(next_path)
# Check instance configuration
instance = Instance.objects.first()
@@ -34,9 +31,11 @@ class GitLabOauthInitiateSpaceEndpoint(View):
error_message="INSTANCE_NOT_CONFIGURED",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
try:
@@ -47,9 +46,11 @@ class GitLabOauthInitiateSpaceEndpoint(View):
return HttpResponseRedirect(auth_url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
@@ -66,9 +67,11 @@ class GitLabCallbackSpaceEndpoint(View):
error_message="GITLAB_OAUTH_PROVIDER_ERROR",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
if not code:
@@ -77,9 +80,11 @@ class GitLabCallbackSpaceEndpoint(View):
error_message="GITLAB_OAUTH_PROVIDER_ERROR",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
try:
@@ -89,11 +94,17 @@ class GitLabCallbackSpaceEndpoint(View):
user_login(request=request, user=user, is_space=True)
# Process workspace and project invitations
# redirect to referer path
url = f"{base_host(request=request, is_space=True)}{str(next_path) if next_path else ''}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)

View File

@@ -1,6 +1,5 @@
# Python imports
import uuid
from urllib.parse import urlencode
# Django import
from django.http import HttpResponseRedirect
@@ -15,15 +14,13 @@ from plane.authentication.adapter.error import (
AuthenticationException,
AUTHENTICATION_ERROR_CODES,
)
from plane.utils.path_validator import validate_next_path
from plane.utils.path_validator import get_safe_redirect_url
class GoogleOauthInitiateSpaceEndpoint(View):
def get(self, request):
request.session["host"] = base_host(request=request, is_space=True)
next_path = request.GET.get("next_path")
if next_path:
request.session["next_path"] = str(next_path)
# Check instance configuration
instance = Instance.objects.first()
@@ -33,9 +30,11 @@ class GoogleOauthInitiateSpaceEndpoint(View):
error_message="INSTANCE_NOT_CONFIGURED",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
try:
@@ -46,9 +45,11 @@ class GoogleOauthInitiateSpaceEndpoint(View):
return HttpResponseRedirect(auth_url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
@@ -65,9 +66,11 @@ class GoogleCallbackSpaceEndpoint(View):
error_message="GOOGLE_OAUTH_PROVIDER_ERROR",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
if not code:
exc = AuthenticationException(
@@ -75,9 +78,11 @@ class GoogleCallbackSpaceEndpoint(View):
error_message="GOOGLE_OAUTH_PROVIDER_ERROR",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
try:
provider = GoogleOAuthProvider(request=request, code=code)
@@ -85,11 +90,17 @@ class GoogleCallbackSpaceEndpoint(View):
# Login the user and record his device info
user_login(request=request, user=user, is_space=True)
# redirect to referer path
url = f"{base_host(request=request, is_space=True)}{str(next_path) if next_path else ''}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params
)
return HttpResponseRedirect(url)

View File

@@ -1,6 +1,3 @@
# Python imports
from urllib.parse import urlencode
# Django imports
from django.core.validators import validate_email
from django.http import HttpResponseRedirect
@@ -23,7 +20,7 @@ from plane.authentication.adapter.error import (
AuthenticationException,
AUTHENTICATION_ERROR_CODES,
)
from plane.utils.path_validator import validate_next_path
from plane.utils.path_validator import get_safe_redirect_url
class MagicGenerateSpaceEndpoint(APIView):
@@ -66,9 +63,11 @@ class MagicSignInSpaceEndpoint(View):
error_message="MAGIC_SIGN_IN_EMAIL_CODE_REQUIRED",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params,
)
return HttpResponseRedirect(url)
existing_user = User.objects.filter(email=email).first()
@@ -79,9 +78,11 @@ class MagicSignInSpaceEndpoint(View):
error_message="USER_DOES_NOT_EXIST",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params,
)
return HttpResponseRedirect(url)
# Active User
@@ -93,15 +94,18 @@ class MagicSignInSpaceEndpoint(View):
# Login the user and record his device info
user_login(request=request, user=user, is_space=True)
# redirect to referer path
path = str(next_path) if next_path else ""
url = f"{base_host(request=request, is_space=True)}{path}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True), next_path=next_path
)
return HttpResponseRedirect(url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params,
)
return HttpResponseRedirect(url)
@@ -120,9 +124,11 @@ class MagicSignUpSpaceEndpoint(View):
error_message="MAGIC_SIGN_UP_EMAIL_CODE_REQUIRED",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params,
)
return HttpResponseRedirect(url)
# Existing User
existing_user = User.objects.filter(email=email).first()
@@ -133,9 +139,11 @@ class MagicSignUpSpaceEndpoint(View):
error_message="USER_ALREADY_EXIST",
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params,
)
return HttpResponseRedirect(url)
try:
@@ -146,12 +154,16 @@ class MagicSignUpSpaceEndpoint(View):
# Login the user and record his device info
user_login(request=request, user=user, is_space=True)
# redirect to referer path
url = f"{base_host(request=request, is_space=True)}{str(next_path) if next_path else ''}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True), next_path=next_path
)
return HttpResponseRedirect(url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path,
params=params,
)
return HttpResponseRedirect(url)

View File

@@ -7,7 +7,7 @@ from django.utils import timezone
# Module imports
from plane.authentication.utils.host import base_host, user_ip
from plane.db.models import User
from plane.utils.path_validator import validate_next_path
from plane.utils.path_validator import get_safe_redirect_url
class SignOutAuthSpaceEndpoint(View):
@@ -22,8 +22,14 @@ class SignOutAuthSpaceEndpoint(View):
user.save()
# Log the user out
logout(request)
url = f"{base_host(request=request, is_space=True)}{str(validate_next_path(next_path)) if next_path else ''}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path
)
return HttpResponseRedirect(url)
except Exception:
url = f"{base_host(request=request, is_space=True)}{str(validate_next_path(next_path)) if next_path else ''}"
url = get_safe_redirect_url(
base_url=base_host(request=request, is_space=True),
next_path=next_path
)
return HttpResponseRedirect(url)

View File

@@ -1,15 +0,0 @@
from django.utils import timezone
from datetime import timedelta
from plane.db.models import APIActivityLog
from celery import shared_task
@shared_task
def delete_api_logs():
# Get the logs older than 30 days to delete
logs_to_delete = APIActivityLog.objects.filter(
created_at__lte=timezone.now() - timedelta(days=30)
)
# Delete the logs
logs_to_delete._raw_delete(logs_to_delete.db)

View File

@@ -0,0 +1,423 @@
# Python imports
from datetime import timedelta
import logging
from typing import List, Dict, Any, Callable, Optional
import os
# Django imports
from django.utils import timezone
from django.db.models import F, Window, Subquery
from django.db.models.functions import RowNumber
# Third party imports
from celery import shared_task
from pymongo.errors import BulkWriteError
from pymongo.collection import Collection
from pymongo.operations import InsertOne
# Module imports
from plane.db.models import (
EmailNotificationLog,
PageVersion,
APIActivityLog,
IssueDescriptionVersion,
)
from plane.settings.mongo import MongoConnection
from plane.utils.exception_logger import log_exception
# Logger for the background-worker tasks in this module (named "plane.worker").
logger = logging.getLogger("plane.worker")

# Number of rows accumulated per MongoDB bulk-write / PostgreSQL delete round trip.
BATCH_SIZE = 1000
def get_mongo_collection(collection_name: str) -> Optional[Collection]:
    """Return the named MongoDB collection, or None when unavailable.

    None is returned both when MongoDB is not configured and when fetching
    the collection raises, letting callers fall back to delete-only mode.
    """
    if not MongoConnection.is_configured():
        logger.info("MongoDB not configured")
        return None
    try:
        collection = MongoConnection.get_collection(collection_name)
    except Exception as exc:
        logger.error(f"Failed to get MongoDB collection: {str(exc)}")
        log_exception(exc)
        return None
    logger.info(f"MongoDB collection '{collection_name}' connected successfully")
    return collection
def flush_to_mongo_and_delete(
    mongo_collection: Optional[Collection],
    buffer: List[Dict[str, Any]],
    ids_to_delete: List[int],
    model,
    mongo_available: bool,
) -> None:
    """
    Inserts a batch of records into MongoDB and deletes the corresponding rows from PostgreSQL.

    Args:
        mongo_collection: Target collection, or None when MongoDB is unavailable.
        buffer: Transformed documents to archive.
        ids_to_delete: Primary keys of the PostgreSQL rows backing ``buffer``.
        model: Django model whose rows are deleted via ``all_objects``.
        mongo_available: Whether MongoDB archival is expected to succeed.
    """
    if not buffer:
        logger.debug("No records to flush - buffer is empty")
        return
    logger.info(
        f"Starting batch flush: {len(buffer)} records, {len(ids_to_delete)} IDs to delete"
    )
    mongo_archival_failed = False
    # Try to insert into MongoDB if available
    if mongo_collection is not None and mongo_available:
        try:
            mongo_collection.bulk_write([InsertOne(doc) for doc in buffer])
        except BulkWriteError as bwe:
            logger.error(f"MongoDB bulk write error: {str(bwe)}")
            log_exception(bwe)
            mongo_archival_failed = True
    # If MongoDB is available and archival failed, log the error and return.
    # Rows stay in PostgreSQL, so the next scheduled run will pick them up again.
    if mongo_available and mongo_archival_failed:
        logger.error(f"MongoDB archival failed for {len(buffer)} records")
        return
    # Delete from PostgreSQL - delete() returns (count, {model: count}).
    # NOTE: when MongoDB is not configured at all (mongo_available False),
    # rows are deleted without being archived anywhere.
    delete_result = model.all_objects.filter(id__in=ids_to_delete).delete()
    deleted_count = (
        delete_result[0] if delete_result and isinstance(delete_result, tuple) else 0
    )
    logger.info(f"Batch flush completed: {deleted_count} records deleted")
def process_cleanup_task(
    queryset_func: Callable,
    transform_func: Callable[[Dict], Dict],
    model,
    task_name: str,
    collection_name: str,
):
    """Archive-and-delete rows from PostgreSQL in batches of BATCH_SIZE.

    Streams rows produced by ``queryset_func``, converts each one with
    ``transform_func``, and hands full batches to
    ``flush_to_mongo_and_delete`` (MongoDB archival when configured,
    then PostgreSQL deletion).

    Args:
        queryset_func: Function that returns the queryset to process.
        transform_func: Function to transform each record for MongoDB.
        model: Django model class.
        task_name: Name of the task for logging.
        collection_name: MongoDB collection name.
    """
    logger.info(f"Starting {task_name} cleanup task")

    mongo_collection = get_mongo_collection(collection_name)
    mongo_available = mongo_collection is not None

    pending_docs: List[Dict[str, Any]] = []
    pending_ids: List[int] = []
    total_processed = 0
    total_batches = 0

    def drain_batch():
        # Flush the accumulated batch, then update counters and reset buffers.
        nonlocal total_processed, total_batches
        total_batches += 1
        flush_to_mongo_and_delete(
            mongo_collection=mongo_collection,
            buffer=pending_docs,
            ids_to_delete=pending_ids,
            model=model,
            mongo_available=mongo_available,
        )
        total_processed += len(pending_docs)
        pending_docs.clear()
        pending_ids.clear()

    for row in queryset_func():
        pending_docs.append(transform_func(row))
        pending_ids.append(row["id"])
        if len(pending_docs) >= BATCH_SIZE:
            drain_batch()

    # Flush whatever remains from the final partial batch.
    if pending_docs:
        drain_batch()

    logger.info(
        f"{task_name} cleanup task completed",
        extra={
            "total_records_processed": total_processed,
            "total_batches": total_batches,
            "mongo_available": mongo_available,
            "collection_name": collection_name,
        },
    )
# Transform functions for each model
def transform_api_log(record: Dict) -> Dict:
    """Build the MongoDB document for one API activity log row.

    UUID/FK fields are stringified; ``created_at`` becomes its string
    form, or None when absent/empty.
    """
    created_at = record.get("created_at")
    doc = {
        "id": str(record["id"]),
        "created_at": str(created_at) if created_at else None,
        "token_identifier": str(record["token_identifier"]),
        "created_by_id": str(record["created_by_id"]),
    }
    # Copied verbatim from the PostgreSQL row.
    for field in (
        "path",
        "method",
        "response_code",
        "response_body",
        "ip_address",
        "user_agent",
    ):
        doc[field] = record[field]
    # These may be absent from the row; default to None.
    for field in ("query_params", "headers", "body"):
        doc[field] = record.get(field)
    return doc
def transform_email_log(record: Dict) -> Dict:
    """Build the MongoDB document for one email notification log row.

    Timestamps are stringified when set (None otherwise); ID fields and
    old/new values are stringified unconditionally.
    """
    created_at = record.get("created_at")
    processed_at = record.get("processed_at")
    sent_at = record.get("sent_at")
    return {
        "id": str(record["id"]),
        "created_at": str(created_at) if created_at else None,
        "receiver_id": str(record["receiver_id"]),
        "triggered_by_id": str(record["triggered_by_id"]),
        "entity_identifier": str(record["entity_identifier"]),
        "entity_name": record["entity_name"],
        "data": record["data"],
        "processed_at": str(processed_at) if processed_at else None,
        "sent_at": str(sent_at) if sent_at else None,
        "entity": record["entity"],
        "old_value": str(record["old_value"]),
        "new_value": str(record["new_value"]),
        "created_by_id": str(record["created_by_id"]),
    }
def transform_page_version(record: Dict) -> Dict:
    """Build the MongoDB document for one page version row."""

    def _stamp(value):
        # Optional timestamps become strings; missing/None stays None.
        return str(value) if value else None

    return {
        "id": str(record["id"]),
        "created_at": _stamp(record.get("created_at")),
        "page_id": str(record["page_id"]),
        "workspace_id": str(record["workspace_id"]),
        "owned_by_id": str(record["owned_by_id"]),
        "description_html": record["description_html"],
        "description_binary": record["description_binary"],
        "description_stripped": record["description_stripped"],
        "description_json": record["description_json"],
        "sub_pages_data": record["sub_pages_data"],
        "created_by_id": str(record["created_by_id"]),
        "updated_by_id": str(record["updated_by_id"]),
        "deleted_at": _stamp(record.get("deleted_at")),
        "last_saved_at": _stamp(record.get("last_saved_at")),
    }
def transform_issue_description_version(record: Dict) -> Dict:
    """Build the MongoDB document for one issue description version row."""

    def _stamp(value):
        # Optional timestamps become strings; missing/None stays None.
        return str(value) if value else None

    return {
        "id": str(record["id"]),
        "created_at": _stamp(record.get("created_at")),
        "issue_id": str(record["issue_id"]),
        "workspace_id": str(record["workspace_id"]),
        "project_id": str(record["project_id"]),
        "created_by_id": str(record["created_by_id"]),
        "updated_by_id": str(record["updated_by_id"]),
        "owned_by_id": str(record["owned_by_id"]),
        "last_saved_at": _stamp(record.get("last_saved_at")),
        "description_binary": record["description_binary"],
        "description_html": record["description_html"],
        "description_stripped": record["description_stripped"],
        "description_json": record["description_json"],
        "deleted_at": _stamp(record.get("deleted_at")),
    }
# Queryset functions for each cleanup task
def get_api_logs_queryset():
    """Stream API activity logs older than the hard-delete cutoff.

    Cutoff is HARD_DELETE_AFTER_DAYS days ago (env var, default 30).
    Returns a server-side iterator of dicts chunked by BATCH_SIZE.
    """
    retention_days = int(os.environ.get("HARD_DELETE_AFTER_DAYS", 30))
    cutoff_time = timezone.now() - timedelta(days=retention_days)
    logger.info(f"API logs cutoff time: {cutoff_time}")
    fields = (
        "id",
        "created_at",
        "token_identifier",
        "path",
        "method",
        "query_params",
        "headers",
        "body",
        "response_code",
        "response_body",
        "ip_address",
        "user_agent",
        "created_by_id",
    )
    stale_logs = APIActivityLog.all_objects.filter(created_at__lte=cutoff_time)
    return stale_logs.values(*fields).iterator(chunk_size=BATCH_SIZE)
def get_email_logs_queryset():
    """Stream email notification logs sent before the hard-delete cutoff.

    Cutoff is HARD_DELETE_AFTER_DAYS days ago (env var, default 30),
    compared against ``sent_at``. Returns a server-side iterator of
    dicts chunked by BATCH_SIZE.
    """
    retention_days = int(os.environ.get("HARD_DELETE_AFTER_DAYS", 30))
    cutoff_time = timezone.now() - timedelta(days=retention_days)
    logger.info(f"Email logs cutoff time: {cutoff_time}")
    fields = (
        "id",
        "created_at",
        "receiver_id",
        "triggered_by_id",
        "entity_identifier",
        "entity_name",
        "data",
        "processed_at",
        "sent_at",
        "entity",
        "old_value",
        "new_value",
        "created_by_id",
    )
    aged_logs = EmailNotificationLog.all_objects.filter(sent_at__lte=cutoff_time)
    return aged_logs.values(*fields).iterator(chunk_size=BATCH_SIZE)
def get_page_versions_queryset():
    """Get page versions beyond the maximum allowed (20 per page).

    Returns a server-side iterator of dicts (chunked by BATCH_SIZE) over
    the excess version history of every page.
    """
    # Rank versions within each page, newest first; anything ranked past
    # 20 is surplus history slated for archival/deletion.
    # NOTE(review): filtering on a window annotation requires Django 4.2+.
    subq = (
        PageVersion.all_objects.annotate(
            row_num=Window(
                expression=RowNumber(),
                partition_by=[F("page_id")],
                order_by=F("created_at").desc(),
            )
        )
        .filter(row_num__gt=20)
        .values("id")
    )
    return (
        PageVersion.all_objects.filter(id__in=Subquery(subq))
        .values(
            "id",
            "created_at",
            "page_id",
            "workspace_id",
            "owned_by_id",
            "description_html",
            "description_binary",
            "description_stripped",
            "description_json",
            "sub_pages_data",
            "created_by_id",
            "updated_by_id",
            "deleted_at",
            "last_saved_at",
        )
        .iterator(chunk_size=BATCH_SIZE)
    )
def get_issue_description_versions_queryset():
    """Get issue description versions beyond the maximum allowed (20 per issue).

    Returns a server-side iterator of dicts (chunked by BATCH_SIZE) over
    the excess version history of every issue.
    """
    # Rank versions within each issue, newest first; anything ranked past
    # 20 is surplus history slated for archival/deletion.
    # NOTE(review): filtering on a window annotation requires Django 4.2+.
    subq = (
        IssueDescriptionVersion.all_objects.annotate(
            row_num=Window(
                expression=RowNumber(),
                partition_by=[F("issue_id")],
                order_by=F("created_at").desc(),
            )
        )
        .filter(row_num__gt=20)
        .values("id")
    )
    return (
        IssueDescriptionVersion.all_objects.filter(id__in=Subquery(subq))
        .values(
            "id",
            "created_at",
            "issue_id",
            "workspace_id",
            "project_id",
            "created_by_id",
            "updated_by_id",
            "owned_by_id",
            "last_saved_at",
            "description_binary",
            "description_html",
            "description_stripped",
            "description_json",
            "deleted_at",
        )
        .iterator(chunk_size=BATCH_SIZE)
    )
# Celery tasks - now much simpler!
@shared_task
def delete_api_logs():
    """Archive old API activity logs to MongoDB and hard-delete them."""
    process_cleanup_task(
        get_api_logs_queryset,
        transform_api_log,
        APIActivityLog,
        "API Activity Log",
        "api_activity_logs",
    )
@shared_task
def delete_email_notification_logs():
    """Archive old email notification logs to MongoDB and hard-delete them."""
    process_cleanup_task(
        get_email_logs_queryset,
        transform_email_log,
        EmailNotificationLog,
        "Email Notification Log",
        "email_notification_logs",
    )
@shared_task
def delete_page_versions():
    """Archive excess page versions (beyond 20 per page) and hard-delete them."""
    process_cleanup_task(
        get_page_versions_queryset,
        transform_page_version,
        PageVersion,
        "Page Version",
        "page_versions",
    )
@shared_task
def delete_issue_description_versions():
    """Archive excess issue description versions (beyond 20 per issue) and hard-delete them."""
    process_cleanup_task(
        get_issue_description_versions_queryset,
        transform_issue_description_version,
        IssueDescriptionVersion,
        "Issue Description Version",
        "issue_description_versions",
    )

View File

@@ -30,6 +30,8 @@ def page_version(page_id, existing_instance, user_id):
description_binary=page.description_binary,
owned_by_id=user_id,
last_saved_at=page.updated_at,
description_json=page.description,
description_stripped=page.description_stripped,
)
# If page versions are greater than 20 delete the oldest one

View File

@@ -92,6 +92,10 @@ def create_project_and_member(workspace: Workspace) -> Dict[int, uuid.UUID]:
name=workspace.name, # Use workspace name
identifier=project_identifier,
created_by_id=workspace.created_by_id,
# Enable all views in seed data
cycle_view=True,
module_view=True,
issue_views_view=True,
)
# Create project members

View File

@@ -50,9 +50,21 @@ app.conf.beat_schedule = {
"schedule": crontab(hour=2, minute=0), # UTC 02:00
},
"check-every-day-to-delete-api-logs": {
"task": "plane.bgtasks.api_logs_task.delete_api_logs",
"task": "plane.bgtasks.cleanup_task.delete_api_logs",
"schedule": crontab(hour=2, minute=30), # UTC 02:30
},
"check-every-day-to-delete-email-notification-logs": {
"task": "plane.bgtasks.cleanup_task.delete_email_notification_logs",
"schedule": crontab(hour=3, minute=0), # UTC 03:00
},
"check-every-day-to-delete-page-versions": {
"task": "plane.bgtasks.cleanup_task.delete_page_versions",
"schedule": crontab(hour=3, minute=30), # UTC 03:30
},
"check-every-day-to-delete-issue-description-versions": {
"task": "plane.bgtasks.cleanup_task.delete_issue_description_versions",
"schedule": crontab(hour=4, minute=0), # UTC 04:00
},
}

View File

@@ -0,0 +1,30 @@
# Generated by Django 4.2.22 on 2025-08-29 11:31
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds manual page ordering and loosens page-log entity tracking.

    dependencies = [
        ("db", "0101_description_descriptionversion"),
    ]

    operations = [
        # Manual sort key for pages; 65535 is the default position.
        migrations.AddField(
            model_name="page",
            name="sort_order",
            field=models.FloatField(default=65535),
        ),
        # Free-form entity type label on page logs.
        migrations.AddField(
            model_name="pagelog",
            name="entity_type",
            field=models.CharField(
                blank=True, max_length=30, null=True, verbose_name="Entity Type"
            ),
        ),
        # entity_identifier becomes optional.
        migrations.AlterField(
            model_name="pagelog",
            name="entity_identifier",
            field=models.UUIDField(blank=True, null=True),
        ),
    ]

View File

@@ -0,0 +1,75 @@
# Generated by Django 4.2.22 on 2025-09-01 14:33
from django.db import migrations, models
from django.contrib.postgres.operations import AddIndexConcurrently
class Migration(migrations.Migration):
    # AddIndexConcurrently (CREATE INDEX CONCURRENTLY) cannot run inside a
    # transaction, so the migration must be non-atomic.
    atomic = False

    dependencies = [
        ('db', '0102_page_sort_order_pagelog_entity_type_and_more'),
    ]

    operations = [
        # Entity lookup indexes on file assets.
        AddIndexConcurrently(
            model_name='fileasset',
            index=models.Index(fields=['entity_type'], name='asset_entity_type_idx'),
        ),
        AddIndexConcurrently(
            model_name='fileasset',
            index=models.Index(fields=['entity_identifier'], name='asset_entity_identifier_idx'),
        ),
        AddIndexConcurrently(
            model_name='fileasset',
            index=models.Index(fields=['entity_type', 'entity_identifier'], name='asset_entity_idx'),
        ),
        # Entity and read-state indexes on notifications.
        AddIndexConcurrently(
            model_name='notification',
            index=models.Index(fields=['entity_identifier'], name='notif_entity_identifier_idx'),
        ),
        AddIndexConcurrently(
            model_name='notification',
            index=models.Index(fields=['entity_name'], name='notif_entity_name_idx'),
        ),
        AddIndexConcurrently(
            model_name='notification',
            index=models.Index(fields=['read_at'], name='notif_read_at_idx'),
        ),
        AddIndexConcurrently(
            model_name='notification',
            index=models.Index(fields=['receiver', 'read_at'], name='notif_entity_idx'),
        ),
        # Entity lookup indexes on page logs.
        AddIndexConcurrently(
            model_name='pagelog',
            index=models.Index(fields=['entity_type'], name='pagelog_entity_type_idx'),
        ),
        AddIndexConcurrently(
            model_name='pagelog',
            index=models.Index(fields=['entity_identifier'], name='pagelog_entity_id_idx'),
        ),
        AddIndexConcurrently(
            model_name='pagelog',
            index=models.Index(fields=['entity_name'], name='pagelog_entity_name_idx'),
        ),
        AddIndexConcurrently(
            model_name='pagelog',
            index=models.Index(fields=['entity_type', 'entity_identifier'], name='pagelog_type_id_idx'),
        ),
        AddIndexConcurrently(
            model_name='pagelog',
            index=models.Index(fields=['entity_name', 'entity_identifier'], name='pagelog_name_id_idx'),
        ),
        # Entity lookup indexes on user favorites.
        AddIndexConcurrently(
            model_name='userfavorite',
            index=models.Index(fields=['entity_type'], name='fav_entity_type_idx'),
        ),
        AddIndexConcurrently(
            model_name='userfavorite',
            index=models.Index(fields=['entity_identifier'], name='fav_entity_identifier_idx'),
        ),
        AddIndexConcurrently(
            model_name='userfavorite',
            index=models.Index(fields=['entity_type', 'entity_identifier'], name='fav_entity_idx'),
        ),
    ]

View File

@@ -0,0 +1,43 @@
# Generated by Django 4.2.22 on 2025-09-03 05:18
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds a ``rich_filters`` JSON column to every user-property/view/export
    # model that already carries a ``filters`` column.

    dependencies = [
        ('db', '0103_fileasset_asset_entity_type_idx_and_more'),
    ]

    operations = [
        migrations.AddField(
            model_name='cycleuserproperties',
            name='rich_filters',
            field=models.JSONField(default=dict),
        ),
        # Exporter history alone allows NULL/blank for this column.
        migrations.AddField(
            model_name='exporterhistory',
            name='rich_filters',
            field=models.JSONField(blank=True, default=dict, null=True),
        ),
        migrations.AddField(
            model_name='issueuserproperty',
            name='rich_filters',
            field=models.JSONField(default=dict),
        ),
        migrations.AddField(
            model_name='issueview',
            name='rich_filters',
            field=models.JSONField(default=dict),
        ),
        migrations.AddField(
            model_name='moduleuserproperties',
            name='rich_filters',
            field=models.JSONField(default=dict),
        ),
        migrations.AddField(
            model_name='workspaceuserproperties',
            name='rich_filters',
            field=models.JSONField(default=dict),
        ),
    ]

View File

@@ -0,0 +1,33 @@
# Generated by Django 4.2.22 on 2025-09-10 09:54
from django.db import migrations, models
class Migration(migrations.Migration):
    # Flips the project view toggles' defaults to False and indexes
    # session user lookups.

    dependencies = [
        ("db", "0104_cycleuserproperties_rich_filters_and_more"),
    ]

    operations = [
        migrations.AlterField(
            model_name="project",
            name="cycle_view",
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name="project",
            name="issue_views_view",
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name="project",
            name="module_view",
            field=models.BooleanField(default=False),
        ),
        # Index session rows by user for faster per-user session lookups.
        migrations.AlterField(
            model_name="session",
            name="user_id",
            field=models.CharField(db_index=True, max_length=50, null=True),
        ),
    ]

View File

@@ -0,0 +1,152 @@
# Generated by Django 4.2.22 on 2025-09-12 08:45
import uuid
import django
from django.conf import settings
from django.db import migrations, models
def set_page_sort_order(apps, schema_editor):
    """Backfill ``Page.sort_order`` with 100, 200, 300, ... in name order.

    Uses the historical model so the backfill works regardless of the
    current model code; includes all pages irrespective of soft-delete
    status, updating in chunks of 3000.
    """
    Page = apps.get_model("db", "Page")
    batch_size = 3000

    # Snapshot the IDs up front so later bulk updates cannot perturb
    # the iteration.
    page_ids = list(Page.objects.all().order_by("name").values_list("id", flat=True))

    pending = []
    for position, page_id in enumerate(page_ids, start=1):
        # Minimal instances: only pk + the field being updated.
        pending.append(Page(id=page_id, sort_order=position * 100))
        if len(pending) >= batch_size:
            Page.objects.bulk_update(pending, ["sort_order"], batch_size=batch_size)
            pending = []

    # Flush the final partial batch.
    if pending:
        Page.objects.bulk_update(pending, ["sort_order"], batch_size=batch_size)
def reverse_set_page_sort_order(apps, schema_editor):
    """Reset every ``Page.sort_order`` to the field default (65535).

    Fix: historical models returned by ``apps.get_model()`` do not carry
    custom class attributes, so referencing ``Page.DEFAULT_SORT_ORDER``
    here would raise AttributeError when the migration is reversed. Use
    the literal default declared for the field in migration 0102 instead.
    """
    Page = apps.get_model("db", "Page")
    # 65535 matches models.FloatField(default=65535) from migration 0102.
    Page.objects.update(sort_order=65535)
class Migration(migrations.Migration):
    # Introduces per-project webhook routing (ProjectWebhook), relaxes the
    # issue relation_type to a free-form string, and backfills page sort order.

    dependencies = [
        ("db", "0105_alter_project_cycle_view_and_more"),
    ]

    operations = [
        migrations.CreateModel(
            name="ProjectWebhook",
            fields=[
                (
                    "created_at",
                    models.DateTimeField(auto_now_add=True, verbose_name="Created At"),
                ),
                (
                    "updated_at",
                    models.DateTimeField(
                        auto_now=True, verbose_name="Last Modified At"
                    ),
                ),
                (
                    "deleted_at",
                    models.DateTimeField(
                        blank=True, null=True, verbose_name="Deleted At"
                    ),
                ),
                (
                    "id",
                    models.UUIDField(
                        db_index=True,
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                        unique=True,
                    ),
                ),
                (
                    "created_by",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="%(class)s_created_by",
                        to=settings.AUTH_USER_MODEL,
                        verbose_name="Created By",
                    ),
                ),
                (
                    "project",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="project_%(class)s",
                        to="db.project",
                    ),
                ),
                (
                    "updated_by",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="%(class)s_updated_by",
                        to=settings.AUTH_USER_MODEL,
                        verbose_name="Last Modified By",
                    ),
                ),
                (
                    "webhook",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="project_webhooks",
                        to="db.webhook",
                    ),
                ),
                (
                    "workspace",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="workspace_%(class)s",
                        to="db.workspace",
                    ),
                ),
            ],
            options={
                "verbose_name": "Project Webhook",
                "verbose_name_plural": "Project Webhooks",
                "db_table": "project_webhooks",
                "ordering": ("-created_at",),
            },
        ),
        # Only one live (non-soft-deleted) link per (project, webhook) pair.
        migrations.AddConstraint(
            model_name="projectwebhook",
            constraint=models.UniqueConstraint(
                condition=models.Q(("deleted_at__isnull", True)),
                fields=("project", "webhook"),
                name="project_webhook_unique_project_webhook_when_deleted_at_null",
            ),
        ),
        migrations.AlterUniqueTogether(
            name="projectwebhook",
            unique_together={("project", "webhook", "deleted_at")},
        ),
        # Drop the choices constraint so relation_type accepts new values
        # (e.g. implemented_by) without further schema churn.
        migrations.AlterField(
            model_name="issuerelation",
            name="relation_type",
            field=models.CharField(
                default="blocked_by", max_length=20, verbose_name="Issue Relation Type"
            ),
        ),
        # Data migration: assign sort_order 100, 200, ... by page name.
        migrations.RunPython(
            set_page_sort_order, reverse_code=reverse_set_page_sort_order
        ),
    ]

View File

@@ -76,6 +76,15 @@ class FileAsset(BaseModel):
verbose_name_plural = "File Assets"
db_table = "file_assets"
ordering = ("-created_at",)
indexes = [
models.Index(fields=["entity_type"], name="asset_entity_type_idx"),
models.Index(
fields=["entity_identifier"], name="asset_entity_identifier_idx"
),
models.Index(
fields=["entity_type", "entity_identifier"], name="asset_entity_idx"
),
]
def __str__(self):
return str(self.asset)

View File

@@ -139,6 +139,7 @@ class CycleUserProperties(ProjectBaseModel):
filters = models.JSONField(default=get_default_filters)
display_filters = models.JSONField(default=get_default_display_filters)
display_properties = models.JSONField(default=get_default_display_properties)
rich_filters = models.JSONField(default=dict)
class Meta:
unique_together = ["cycle", "user", "deleted_at"]

View File

@@ -56,6 +56,7 @@ class ExporterHistory(BaseModel):
related_name="workspace_exporters",
)
filters = models.JSONField(blank=True, null=True)
rich_filters = models.JSONField(default=dict, blank=True, null=True)
class Meta:
verbose_name = "Exporter"

View File

@@ -41,6 +41,15 @@ class UserFavorite(WorkspaceBaseModel):
verbose_name_plural = "User Favorites"
db_table = "user_favorites"
ordering = ("-created_at",)
indexes = [
models.Index(fields=["entity_type"], name="fav_entity_type_idx"),
models.Index(
fields=["entity_identifier"], name="fav_entity_identifier_idx"
),
models.Index(
fields=["entity_type", "entity_identifier"], name="fav_entity_idx"
),
]
def save(self, *args, **kwargs):
if self._state.adding:

View File

@@ -284,6 +284,7 @@ class IssueRelationChoices(models.TextChoices):
BLOCKED_BY = "blocked_by", "Blocked By"
START_BEFORE = "start_before", "Start Before"
FINISH_BEFORE = "finish_before", "Finish Before"
IMPLEMENTED_BY = "implemented_by", "Implemented By"
class IssueRelation(ProjectBaseModel):
@@ -295,7 +296,6 @@ class IssueRelation(ProjectBaseModel):
)
relation_type = models.CharField(
max_length=20,
choices=IssueRelationChoices.choices,
verbose_name="Issue Relation Type",
default=IssueRelationChoices.BLOCKED_BY,
)
@@ -509,6 +509,7 @@ class IssueUserProperty(ProjectBaseModel):
filters = models.JSONField(default=get_default_filters)
display_filters = models.JSONField(default=get_default_display_filters)
display_properties = models.JSONField(default=get_default_display_properties)
rich_filters = models.JSONField(default=dict)
class Meta:
verbose_name = "Issue User Property"

View File

@@ -207,6 +207,7 @@ class ModuleUserProperties(ProjectBaseModel):
filters = models.JSONField(default=get_default_filters)
display_filters = models.JSONField(default=get_default_display_filters)
display_properties = models.JSONField(default=get_default_display_properties)
rich_filters = models.JSONField(default=dict)
class Meta:
unique_together = ["module", "user", "deleted_at"]

View File

@@ -39,6 +39,14 @@ class Notification(BaseModel):
verbose_name_plural = "Notifications"
db_table = "notifications"
ordering = ("-created_at",)
indexes = [
models.Index(
fields=["entity_identifier"], name="notif_entity_identifier_idx"
),
models.Index(fields=["entity_name"], name="notif_entity_name_idx"),
models.Index(fields=["read_at"], name="notif_read_at_idx"),
models.Index(fields=["receiver", "read_at"], name="notif_entity_idx"),
]
def __str__(self):
"""Return name of the notifications"""

View File

@@ -19,6 +19,7 @@ def get_view_props():
class Page(BaseModel):
PRIVATE_ACCESS = 1
PUBLIC_ACCESS = 0
DEFAULT_SORT_ORDER = 65535
ACCESS_CHOICES = ((PRIVATE_ACCESS, "Private"), (PUBLIC_ACCESS, "Public"))
@@ -57,6 +58,7 @@ class Page(BaseModel):
)
moved_to_page = models.UUIDField(null=True, blank=True)
moved_to_project = models.UUIDField(null=True, blank=True)
sort_order = models.FloatField(default=DEFAULT_SORT_ORDER)
external_id = models.CharField(max_length=255, null=True, blank=True)
external_source = models.CharField(max_length=255, null=True, blank=True)
@@ -98,8 +100,11 @@ class PageLog(BaseModel):
)
transaction = models.UUIDField(default=uuid.uuid4)
page = models.ForeignKey(Page, related_name="page_log", on_delete=models.CASCADE)
entity_identifier = models.UUIDField(null=True)
entity_identifier = models.UUIDField(null=True, blank=True)
entity_name = models.CharField(max_length=30, verbose_name="Transaction Type")
entity_type = models.CharField(
max_length=30, verbose_name="Entity Type", null=True, blank=True
)
workspace = models.ForeignKey(
"db.Workspace", on_delete=models.CASCADE, related_name="workspace_page_log"
)
@@ -110,6 +115,17 @@ class PageLog(BaseModel):
verbose_name_plural = "Page Logs"
db_table = "page_logs"
ordering = ("-created_at",)
indexes = [
models.Index(fields=["entity_type"], name="pagelog_entity_type_idx"),
models.Index(fields=["entity_identifier"], name="pagelog_entity_id_idx"),
models.Index(fields=["entity_name"], name="pagelog_entity_name_idx"),
models.Index(
fields=["entity_type", "entity_identifier"], name="pagelog_type_id_idx"
),
models.Index(
fields=["entity_name", "entity_identifier"], name="pagelog_name_id_idx"
),
]
def __str__(self):
return f"{self.page.name} {self.entity_name}"

View File

@@ -18,6 +18,12 @@ from .base import BaseModel
ROLE_CHOICES = ((20, "Admin"), (15, "Member"), (5, "Guest"))
class ROLE(Enum):
ADMIN = 20
MEMBER = 15
GUEST = 5
class ProjectNetwork(Enum):
SECRET = 0
PUBLIC = 2
@@ -89,9 +95,9 @@ class Project(BaseModel):
)
emoji = models.CharField(max_length=255, null=True, blank=True)
icon_prop = models.JSONField(null=True)
module_view = models.BooleanField(default=True)
cycle_view = models.BooleanField(default=True)
issue_views_view = models.BooleanField(default=True)
module_view = models.BooleanField(default=False)
cycle_view = models.BooleanField(default=False)
issue_views_view = models.BooleanField(default=False)
page_view = models.BooleanField(default=True)
intake_view = models.BooleanField(default=False)
is_time_tracking_enabled = models.BooleanField(default=False)

View File

@@ -13,7 +13,7 @@ VALID_KEY_CHARS = string.ascii_lowercase + string.digits
class Session(AbstractBaseSession):
device_info = models.JSONField(null=True, blank=True, default=None)
session_key = models.CharField(max_length=128, primary_key=True)
user_id = models.CharField(null=True, max_length=50)
user_id = models.CharField(null=True, max_length=50, db_index=True)
@classmethod
def get_session_store_class(cls):

View File

@@ -58,6 +58,7 @@ class IssueView(WorkspaceBaseModel):
filters = models.JSONField(default=dict)
display_filters = models.JSONField(default=get_default_display_filters)
display_properties = models.JSONField(default=get_default_display_properties)
rich_filters = models.JSONField(default=dict)
access = models.PositiveSmallIntegerField(
default=1, choices=((0, "Private"), (1, "Public"))
)

View File

@@ -7,7 +7,7 @@ from django.db import models
from django.core.exceptions import ValidationError
# Module imports
from plane.db.models import BaseModel
from plane.db.models import BaseModel, ProjectBaseModel
def generate_token():
@@ -90,3 +90,24 @@ class WebhookLog(BaseModel):
def __str__(self):
return f"{self.event_type} {str(self.webhook)}"
class ProjectWebhook(ProjectBaseModel):
webhook = models.ForeignKey(
"db.Webhook", on_delete=models.CASCADE, related_name="project_webhooks"
)
class Meta:
unique_together = ["project", "webhook", "deleted_at"]
constraints = [
models.UniqueConstraint(
fields=["project", "webhook"],
condition=models.Q(deleted_at__isnull=True),
name="project_webhook_unique_project_webhook_when_deleted_at_null",
)
]
verbose_name = "Project Webhook"
verbose_name_plural = "Project Webhooks"
db_table = "project_webhooks"
ordering = ("-created_at",)

View File

@@ -332,6 +332,7 @@ class WorkspaceUserProperties(BaseModel):
filters = models.JSONField(default=get_default_filters)
display_filters = models.JSONField(default=get_default_display_filters)
display_properties = models.JSONField(default=get_default_display_properties)
rich_filters = models.JSONField(default=dict)
class Meta:
unique_together = ["workspace", "user", "deleted_at"]

View File

@@ -34,6 +34,7 @@ from plane.authentication.adapter.error import (
AuthenticationException,
)
from plane.utils.ip_address import get_client_ip
from plane.utils.path_validator import get_safe_redirect_url
class InstanceAdminEndpoint(BaseAPIView):
@@ -392,7 +393,14 @@ class InstanceAdminSignOutEndpoint(View):
user.save()
# Log the user out
logout(request)
url = urljoin(base_host(request=request, is_admin=True))
url = get_safe_redirect_url(
base_url=base_host(request=request, is_admin=True),
next_path=""
)
return HttpResponseRedirect(url)
except Exception:
return HttpResponseRedirect(base_host(request=request, is_admin=True))
url = get_safe_redirect_url(
base_url=base_host(request=request, is_admin=True),
next_path=""
)
return HttpResponseRedirect(url)

View File

@@ -0,0 +1,18 @@
# Generated by Django 4.2.22 on 2025-09-11 08:40
from django.db import migrations, models
class Migration(migrations.Migration):
    # Must run after 0005, which (per its name) renamed the
    # product/instance edition fields on the license app.
    dependencies = [
        ("license", "0005_rename_product_instance_edition_and_more"),
    ]

    operations = [
        # Add a boolean flag to Instance marking whether the currently
        # running version is deprecated; existing rows default to False.
        migrations.AddField(
            model_name="instance",
            name="is_current_version_deprecated",
            field=models.BooleanField(default=False),
        ),
    ]

View File

@@ -38,6 +38,8 @@ class Instance(BaseModel):
is_signup_screen_visited = models.BooleanField(default=False)
is_verified = models.BooleanField(default=False)
is_test = models.BooleanField(default=False)
# field for validating if the current version is deprecated
is_current_version_deprecated = models.BooleanField(default=False)
class Meta:
verbose_name = "Instance"

View File

@@ -284,7 +284,7 @@ CELERY_IMPORTS = (
"plane.bgtasks.exporter_expired_task",
"plane.bgtasks.file_asset_task",
"plane.bgtasks.email_notification_task",
"plane.bgtasks.api_logs_task",
"plane.bgtasks.cleanup_task",
"plane.license.bgtasks.tracer",
# management tasks
"plane.bgtasks.dummy_data_task",
@@ -465,3 +465,7 @@ if ENABLE_DRF_SPECTACULAR:
REST_FRAMEWORK["DEFAULT_SCHEMA_CLASS"] = "drf_spectacular.openapi.AutoSchema"
INSTALLED_APPS.append("drf_spectacular")
from .openapi import SPECTACULAR_SETTINGS # noqa: F401
# MongoDB Settings
MONGO_DB_URL = os.environ.get("MONGO_DB_URL", False)
MONGO_DB_DATABASE = os.environ.get("MONGO_DB_DATABASE", False)

View File

@@ -73,5 +73,10 @@ LOGGING = {
"handlers": ["console"],
"propagate": False,
},
"plane.mongo": {
"level": "INFO",
"handlers": ["console"],
"propagate": False,
},
},
}

View File

@@ -0,0 +1,124 @@
# Django imports
from django.conf import settings
import logging
# Third party imports
from pymongo import MongoClient
from pymongo.database import Database
from pymongo.collection import Collection
from typing import Optional, TypeVar, Type
T = TypeVar("T", bound="MongoConnection")
# Set up logger
logger = logging.getLogger("plane.mongo")
class MongoConnection:
    """
    A singleton class that manages MongoDB connections.

    Only one MongoDB connection is maintained for the whole process. The
    class exposes accessors for the client, the database, and named
    collections. When the connection parameters are missing or the initial
    connection attempt fails, every accessor returns None so callers can
    degrade gracefully instead of crashing.

    NOTE(review): a failed first connection is never retried for the
    lifetime of the process (_instance stays set) — confirm this is the
    intended behavior.

    Attributes:
        _instance (Optional[MongoConnection]): The singleton instance of this class
        _client (Optional[MongoClient]): The MongoDB client instance
        _db (Optional[Database]): The MongoDB database instance
    """

    _instance: Optional["MongoConnection"] = None
    _client: Optional[MongoClient] = None
    _db: Optional[Database] = None

    def __new__(cls: Type[T]) -> T:
        """
        Creates the singleton instance and, on first call, attempts to open
        the MongoDB connection using Django settings.

        Returns:
            MongoConnection: The singleton instance
        """
        if cls._instance is None:
            cls._instance = super(MongoConnection, cls).__new__(cls)
            try:
                mongo_url = getattr(settings, "MONGO_DB_URL", None)
                mongo_db_database = getattr(settings, "MONGO_DB_DATABASE", None)
                if not mongo_url or not mongo_db_database:
                    logger.warning(
                        "MongoDB connection parameters not configured. MongoDB functionality will be disabled."
                    )
                    return cls._instance
                cls._client = MongoClient(mongo_url)
                cls._db = cls._client[mongo_db_database]
                # Force a round trip so a bad URL/unreachable server
                # surfaces here rather than on the first query.
                cls._client.server_info()
                logger.info("MongoDB connection established successfully")
            except Exception as e:
                # Reset partially-initialized handles. Previously a failed
                # server_info() left _client/_db assigned, so is_configured()
                # reported True and get_client() returned a dead client.
                cls._client = None
                cls._db = None
                logger.warning(
                    f"Failed to initialize MongoDB connection: {str(e)}. MongoDB functionality will be disabled."
                )
        return cls._instance

    @classmethod
    def get_client(cls) -> Optional[MongoClient]:
        """
        Returns the MongoDB client instance.

        Returns:
            Optional[MongoClient]: The MongoDB client instance or None if not configured
        """
        if cls._client is None:
            # Triggers __new__ (and the one-time connection attempt) if the
            # singleton has not been created yet.
            cls._instance = cls()
        return cls._client

    @classmethod
    def get_db(cls) -> Optional[Database]:
        """
        Returns the MongoDB database instance.

        Returns:
            Optional[Database]: The MongoDB database instance or None if not configured
        """
        if cls._db is None:
            cls._instance = cls()
        return cls._db

    @classmethod
    def get_collection(cls, collection_name: str) -> Optional[Collection]:
        """
        Returns a MongoDB collection by name.

        Args:
            collection_name (str): The name of the collection to retrieve

        Returns:
            Optional[Collection]: The MongoDB collection instance or None if not configured
        """
        try:
            db = cls.get_db()
            if db is None:
                logger.warning(
                    f"Cannot access collection '{collection_name}': MongoDB not configured"
                )
                return None
            return db[collection_name]
        except Exception as e:
            logger.warning(f"Failed to access collection '{collection_name}': {str(e)}")
            return None

    @classmethod
    def is_configured(cls) -> bool:
        """
        Check if MongoDB is properly configured and connected.

        Returns:
            bool: True if MongoDB is configured and connected, False otherwise
        """
        if cls._client is None:
            cls._instance = cls()
        return cls._client is not None and cls._db is not None

View File

@@ -83,5 +83,10 @@ LOGGING = {
"handlers": ["console"],
"propagate": False,
},
"plane.mongo": {
"level": "INFO",
"handlers": ["console"],
"propagate": False,
},
},
}

View File

@@ -30,7 +30,6 @@ from plane.db.models import (
)
from plane.utils.content_validator import (
validate_html_content,
validate_json_content,
validate_binary_data,
)
@@ -290,20 +289,22 @@ class IssueCreateSerializer(BaseSerializer):
raise serializers.ValidationError("Start date cannot exceed target date")
# Validate description content for security
if "description" in data and data["description"]:
is_valid, error_msg = validate_json_content(data["description"])
if not is_valid:
raise serializers.ValidationError({"description": error_msg})
if "description_html" in data and data["description_html"]:
is_valid, error_msg = validate_html_content(data["description_html"])
is_valid, error_msg, sanitized_html = validate_html_content(
data["description_html"]
)
if not is_valid:
raise serializers.ValidationError({"description_html": error_msg})
raise serializers.ValidationError(
{"error": "html content is not valid"}
)
# Update the data with sanitized HTML if available
if sanitized_html is not None:
data["description_html"] = sanitized_html
if "description_binary" in data and data["description_binary"]:
is_valid, error_msg = validate_binary_data(data["description_binary"])
if not is_valid:
raise serializers.ValidationError({"description_binary": error_msg})
raise serializers.ValidationError({"description_binary": "Invalid binary data"})
return data

View File

@@ -1,36 +1,14 @@
# Python imports
import base64
import json
import re
import nh3
from plane.utils.exception_logger import log_exception
from bs4 import BeautifulSoup
from collections import defaultdict
# Maximum allowed size for binary data (10MB)
MAX_SIZE = 10 * 1024 * 1024
# Maximum recursion depth to prevent stack overflow
MAX_RECURSION_DEPTH = 20
# Dangerous text patterns that could indicate XSS or script injection
DANGEROUS_TEXT_PATTERNS = [
r"<script[^>]*>.*?</script>",
r"javascript\s*:",
r"data\s*:\s*text/html",
r"eval\s*\(",
r"document\s*\.",
r"window\s*\.",
r"location\s*\.",
]
# Dangerous attribute patterns for HTML attributes
DANGEROUS_ATTR_PATTERNS = [
r"javascript\s*:",
r"data\s*:\s*text/html",
r"eval\s*\(",
r"alert\s*\(",
r"document\s*\.",
r"window\s*\.",
]
# Suspicious patterns for binary data content
SUSPICIOUS_BINARY_PATTERNS = [
"<html",
@@ -41,74 +19,11 @@ SUSPICIOUS_BINARY_PATTERNS = [
"<iframe",
]
# Malicious HTML patterns for content validation
MALICIOUS_HTML_PATTERNS = [
# Script tags with any content
r"<script[^>]*>",
r"</script>",
# JavaScript URLs in various attributes
r'(?:href|src|action)\s*=\s*["\']?\s*javascript:',
# Data URLs with text/html (potential XSS)
r'(?:href|src|action)\s*=\s*["\']?\s*data:text/html',
# Dangerous event handlers with JavaScript-like content
r'on(?:load|error|click|focus|blur|change|submit|reset|select|resize|scroll|unload|beforeunload|hashchange|popstate|storage|message|offline|online)\s*=\s*["\']?[^"\']*(?:javascript|alert|eval|document\.|window\.|location\.|history\.)[^"\']*["\']?',
# Object and embed tags that could load external content
r"<(?:object|embed)[^>]*(?:data|src)\s*=",
# Base tag that could change relative URL resolution
r"<base[^>]*href\s*=",
# Dangerous iframe sources
r'<iframe[^>]*src\s*=\s*["\']?(?:javascript:|data:text/html)',
# Meta refresh redirects
r'<meta[^>]*http-equiv\s*=\s*["\']?refresh["\']?',
# Link tags - simplified patterns
r'<link[^>]*rel\s*=\s*["\']?stylesheet["\']?',
r'<link[^>]*href\s*=\s*["\']?https?://',
r'<link[^>]*href\s*=\s*["\']?//',
r'<link[^>]*href\s*=\s*["\']?(?:data:|javascript:)',
# Style tags with external imports
r"<style[^>]*>.*?@import.*?(?:https?://|//)",
# Link tags with dangerous rel types
r'<link[^>]*rel\s*=\s*["\']?(?:import|preload|prefetch|dns-prefetch|preconnect)["\']?',
# Forms with action attributes
r"<form[^>]*action\s*=",
]
# Dangerous JavaScript patterns for event handlers
DANGEROUS_JS_PATTERNS = [
r"alert\s*\(",
r"eval\s*\(",
r"document\s*\.",
r"window\s*\.",
r"location\s*\.",
r"fetch\s*\(",
r"XMLHttpRequest",
r"innerHTML\s*=",
r"outerHTML\s*=",
r"document\.write",
r"script\s*>",
]
# HTML self-closing tags that don't need closing tags
SELF_CLOSING_TAGS = {
"img",
"br",
"hr",
"input",
"meta",
"link",
"area",
"base",
"col",
"embed",
"source",
"track",
"wbr",
}
def validate_binary_data(data):
"""
Validate that binary data appears to be valid document format and doesn't contain malicious content.
Validate that binary data appears to be a valid document format
and doesn't contain malicious content.
Args:
data (bytes or str): The binary data to validate, or base64-encoded string
@@ -149,191 +64,180 @@ def validate_binary_data(data):
return True, None
def validate_html_content(html_content):
# Combine custom components and editor-specific nodes into a single set of tags
CUSTOM_TAGS = {
# editor node/tag names
"mention-component",
"label",
"input",
"image-component",
}
ALLOWED_TAGS = nh3.ALLOWED_TAGS | CUSTOM_TAGS
# Merge nh3 defaults with all attributes used across our custom components
ATTRIBUTES = {
"*": {
"class",
"id",
"title",
"role",
"aria-label",
"aria-hidden",
"style",
"start",
"type",
# common editor data-* attributes seen in stored HTML
# (wildcards like data-* are NOT supported by nh3; we add known keys
# here and dynamically include all data-* seen in the input below)
"data-tight",
"data-node-type",
"data-type",
"data-checked",
"data-background-color",
"data-text-color",
"data-name",
# callout attributes
"data-icon-name",
"data-icon-color",
"data-background",
"data-emoji-unicode",
"data-emoji-url",
"data-logo-in-use",
"data-block-type",
},
"a": {"href", "target"},
# editor node/tag attributes
"image-component": {
"id",
"width",
"height",
"aspectRatio",
"aspectratio",
"src",
"alignment",
},
"img": {
"width",
"height",
"aspectRatio",
"aspectratio",
"alignment",
"src",
"alt",
"title",
},
"mention-component": {"id", "entity_identifier", "entity_name"},
"th": {
"colspan",
"rowspan",
"colwidth",
"background",
"hideContent",
"hidecontent",
"style",
},
"td": {
"colspan",
"rowspan",
"colwidth",
"background",
"textColor",
"textcolor",
"hideContent",
"hidecontent",
"style",
},
"tr": {"background", "textColor", "textcolor", "style"},
"pre": {"language"},
"code": {"language", "spellcheck"},
"input": {"type", "checked"},
}
SAFE_PROTOCOLS = {"http", "https", "mailto", "tel"}
def _compute_html_sanitization_diff(before_html: str, after_html: str):
"""
Validate that HTML content is safe and doesn't contain malicious patterns.
Compute a coarse diff between original and sanitized HTML.
Args:
html_content (str): The HTML content to validate
Returns a dict with:
- removed_tags: mapping[tag] -> removed_count
- removed_attributes: mapping[tag] -> sorted list of attribute names removed
"""
try:
Returns:
tuple: (is_valid: bool, error_message: str or None)
def collect(soup):
tag_counts = defaultdict(int)
attrs_by_tag = defaultdict(set)
for el in soup.find_all(True):
tag_name = (el.name or "").lower()
if not tag_name:
continue
tag_counts[tag_name] += 1
for attr_name in list(el.attrs.keys()):
if isinstance(attr_name, str) and attr_name:
attrs_by_tag[tag_name].add(attr_name.lower())
return tag_counts, attrs_by_tag
soup_before = BeautifulSoup(before_html or "", "html.parser")
soup_after = BeautifulSoup(after_html or "", "html.parser")
counts_before, attrs_before = collect(soup_before)
counts_after, attrs_after = collect(soup_after)
removed_tags = {}
for tag, cnt_before in counts_before.items():
cnt_after = counts_after.get(tag, 0)
if cnt_after < cnt_before:
removed = cnt_before - cnt_after
removed_tags[tag] = removed
removed_attributes = {}
for tag, before_set in attrs_before.items():
after_set = attrs_after.get(tag, set())
removed = before_set - after_set
if removed:
removed_attributes[tag] = sorted(list(removed))
return {"removed_tags": removed_tags, "removed_attributes": removed_attributes}
except Exception:
# Best-effort only; if diffing fails we don't block the request
return {"removed_tags": {}, "removed_attributes": {}}
def validate_html_content(html_content: str):
"""
Sanitize HTML content using nh3.
Returns a tuple: (is_valid, error_message, clean_html)
"""
if not html_content:
return True, None # Empty is OK
return True, None, None
# Size check - 10MB limit (consistent with binary validation)
if len(html_content.encode("utf-8")) > MAX_SIZE:
return False, "HTML content exceeds maximum size limit (10MB)"
# Check for specific malicious patterns (simplified and more reliable)
for pattern in MALICIOUS_HTML_PATTERNS:
if re.search(pattern, html_content, re.IGNORECASE | re.DOTALL):
return (
False,
f"HTML content contains potentially malicious patterns: {pattern}",
)
# Additional check for inline event handlers that contain suspicious content
# This is more permissive - only blocks if the event handler contains actual dangerous code
event_handler_pattern = r'on\w+\s*=\s*["\']([^"\']*)["\']'
event_matches = re.findall(event_handler_pattern, html_content, re.IGNORECASE)
for handler_content in event_matches:
for js_pattern in DANGEROUS_JS_PATTERNS:
if re.search(js_pattern, handler_content, re.IGNORECASE):
return (
False,
f"HTML content contains dangerous JavaScript in event handler: {handler_content[:100]}",
)
return True, None
def validate_json_content(json_content):
"""
Validate that JSON content is safe and doesn't contain malicious patterns.
Args:
json_content (dict): The JSON content to validate
Returns:
tuple: (is_valid: bool, error_message: str or None)
"""
if not json_content:
return True, None # Empty is OK
return False, "HTML content exceeds maximum size limit (10MB)", None
try:
# Size check - 10MB limit (consistent with other validations)
json_str = json.dumps(json_content)
if len(json_str.encode("utf-8")) > MAX_SIZE:
return False, "JSON content exceeds maximum size limit (10MB)"
clean_html = nh3.clean(
html_content,
tags=ALLOWED_TAGS,
attributes=ATTRIBUTES,
url_schemes=SAFE_PROTOCOLS,
)
# Report removals to logger (Sentry) if anything was stripped
diff = _compute_html_sanitization_diff(html_content, clean_html)
if diff.get("removed_tags") or diff.get("removed_attributes"):
try:
import json
# Basic structure validation for page description JSON
if isinstance(json_content, dict):
# Check for expected page description structure
# This is based on ProseMirror/Tiptap JSON structure
if "type" in json_content and json_content.get("type") == "doc":
# Valid document structure
if "content" in json_content and isinstance(
json_content["content"], list
):
# Recursively check content for suspicious patterns
is_valid, error_msg = _validate_json_content_array(
json_content["content"]
)
if not is_valid:
return False, error_msg
elif "type" not in json_content and "content" not in json_content:
# Allow other JSON structures but validate for suspicious content
is_valid, error_msg = _validate_json_content_recursive(json_content)
if not is_valid:
return False, error_msg
else:
return False, "JSON description must be a valid object"
except (TypeError, ValueError) as e:
return False, "Invalid JSON structure"
summary = json.dumps(diff)
except Exception:
summary = str(diff)
log_exception(
f"HTML sanitization removals: {summary}",
warning=True,
)
return True, None, clean_html
except Exception as e:
return False, "Failed to validate JSON content"
return True, None
def _validate_json_content_array(content, depth=0):
"""
Validate JSON content array for suspicious patterns.
Args:
content (list): Array of content nodes to validate
depth (int): Current recursion depth (default: 0)
Returns:
tuple: (is_valid: bool, error_message: str or None)
"""
# Check recursion depth to prevent stack overflow
if depth > MAX_RECURSION_DEPTH:
return False, f"Maximum recursion depth ({MAX_RECURSION_DEPTH}) exceeded"
if not isinstance(content, list):
return True, None
for node in content:
if isinstance(node, dict):
# Check text content for suspicious patterns (more targeted)
if node.get("type") == "text" and "text" in node:
text_content = node["text"]
for pattern in DANGEROUS_TEXT_PATTERNS:
if re.search(pattern, text_content, re.IGNORECASE):
return (
False,
"JSON content contains suspicious script patterns in text",
)
# Check attributes for suspicious content (more targeted)
if "attrs" in node and isinstance(node["attrs"], dict):
for attr_name, attr_value in node["attrs"].items():
if isinstance(attr_value, str):
# Only check specific attributes that could be dangerous
if attr_name.lower() in [
"href",
"src",
"action",
"onclick",
"onload",
"onerror",
]:
for pattern in DANGEROUS_ATTR_PATTERNS:
if re.search(pattern, attr_value, re.IGNORECASE):
return (
False,
f"JSON content contains dangerous pattern in {attr_name} attribute",
)
# Recursively check nested content
if "content" in node and isinstance(node["content"], list):
is_valid, error_msg = _validate_json_content_array(
node["content"], depth + 1
)
if not is_valid:
return False, error_msg
return True, None
def _validate_json_content_recursive(obj, depth=0):
"""
Recursively validate JSON object for suspicious content.
Args:
obj: JSON object (dict, list, or primitive) to validate
depth (int): Current recursion depth (default: 0)
Returns:
tuple: (is_valid: bool, error_message: str or None)
"""
# Check recursion depth to prevent stack overflow
if depth > MAX_RECURSION_DEPTH:
return False, f"Maximum recursion depth ({MAX_RECURSION_DEPTH}) exceeded"
if isinstance(obj, dict):
for key, value in obj.items():
if isinstance(value, str):
# Check for dangerous patterns using module constants
for pattern in DANGEROUS_TEXT_PATTERNS:
if re.search(pattern, value, re.IGNORECASE):
return (
False,
"JSON content contains suspicious script patterns",
)
elif isinstance(value, (dict, list)):
is_valid, error_msg = _validate_json_content_recursive(value, depth + 1)
if not is_valid:
return False, error_msg
elif isinstance(obj, list):
for item in obj:
is_valid, error_msg = _validate_json_content_recursive(item, depth + 1)
if not is_valid:
return False, error_msg
return True, None
log_exception(e)
return False, "Failed to sanitize HTML", None

View File

@@ -1,7 +1,7 @@
# Django imports
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models import Q, UUIDField, Value, QuerySet
from django.db.models import Q, UUIDField, Value, QuerySet, OuterRef, Subquery
from django.db.models.functions import Coalesce
# Module imports
@@ -14,6 +14,9 @@ from plane.db.models import (
ProjectMember,
State,
WorkspaceMember,
IssueAssignee,
ModuleIssue,
IssueLabel,
)
from typing import Optional, Dict, Tuple, Any, Union, List
@@ -39,33 +42,52 @@ def issue_queryset_grouper(
if group_key in GROUP_FILTER_MAPPER:
queryset = queryset.filter(GROUP_FILTER_MAPPER[group_key])
issue_assignee_subquery = Subquery(
IssueAssignee.objects.filter(
issue_id=OuterRef("pk"),
deleted_at__isnull=True,
)
.values("issue_id")
.annotate(arr=ArrayAgg("assignee_id", distinct=True))
.values("arr")
)
issue_module_subquery = Subquery(
ModuleIssue.objects.filter(
issue_id=OuterRef("pk"),
deleted_at__isnull=True,
module__archived_at__isnull=True,
)
.values("issue_id")
.annotate(arr=ArrayAgg("module_id", distinct=True))
.values("arr")
)
issue_label_subquery = Subquery(
IssueLabel.objects.filter(issue_id=OuterRef("pk"), deleted_at__isnull=True)
.values("issue_id")
.annotate(arr=ArrayAgg("label_id", distinct=True))
.values("arr")
)
annotations_map: Dict[str, Tuple[str, Q]] = {
"assignee_ids": (
"assignees__id",
~Q(assignees__id__isnull=True) & Q(issue_assignee__deleted_at__isnull=True),
"assignee_ids": Coalesce(
issue_assignee_subquery, Value([], output_field=ArrayField(UUIDField()))
),
"label_ids": (
"labels__id",
~Q(labels__id__isnull=True) & Q(label_issue__deleted_at__isnull=True),
"label_ids": Coalesce(
issue_label_subquery, Value([], output_field=ArrayField(UUIDField()))
),
"module_ids": (
"issue_module__module_id",
(
~Q(issue_module__module_id__isnull=True)
& Q(issue_module__module__archived_at__isnull=True)
& Q(issue_module__deleted_at__isnull=True)
),
"module_ids": Coalesce(
issue_module_subquery, Value([], output_field=ArrayField(UUIDField()))
),
}
default_annotations: Dict[str, Any] = {
key: Coalesce(
ArrayAgg(field, distinct=True, filter=condition),
Value([], output_field=ArrayField(UUIDField())),
)
for key, (field, condition) in annotations_map.items()
if FIELD_MAPPER.get(key) != group_by or FIELD_MAPPER.get(key) != sub_group_by
}
default_annotations: Dict[str, Any] = {}
for key, expression in annotations_map.items():
if FIELD_MAPPER.get(key) in {group_by, sub_group_by}:
continue
default_annotations[key] = expression
return queryset.annotate(**default_annotations)

View File

@@ -476,6 +476,8 @@ def filter_subscribed_issues(params, issue_filter, method, prefix=""):
issue_filter[f"{prefix}issue_subscribers__subscriber_id__in"] = params.get(
"subscriber"
)
issue_filter[f"{prefix}issue_subscribers__deleted_at__isnull"] = True
return issue_filter

View File

@@ -6,12 +6,14 @@ def get_inverse_relation(relation_type):
"blocking": "blocked_by",
"start_before": "start_after",
"finish_before": "finish_after",
"implemented_by": "implements",
"implements": "implemented_by",
}
return relation_mapping.get(relation_type, relation_type)
def get_actual_relation(relation_type):
# This function is used to get the actual relation type which is store in database
# This function is used to get the actual relation type which is stored in database
actual_relation = {
"start_after": "start_before",
"finish_after": "finish_before",
@@ -19,6 +21,8 @@ def get_actual_relation(relation_type):
"blocked_by": "blocked_by",
"start_before": "start_before",
"finish_before": "finish_before",
"implemented_by": "implemented_by",
"implements": "implemented_by",
}
return actual_relation.get(relation_type, relation_type)

View File

@@ -2,20 +2,97 @@
from urllib.parse import urlparse
def _contains_suspicious_patterns(path: str) -> bool:
"""
Check for suspicious patterns that might indicate malicious intent.
Args:
path (str): The path to check
Returns:
bool: True if suspicious patterns found, False otherwise
"""
suspicious_patterns = [
r'javascript:', # JavaScript injection
r'data:', # Data URLs
r'vbscript:', # VBScript injection
r'file:', # File protocol
r'ftp:', # FTP protocol
r'%2e%2e', # URL encoded path traversal
r'%2f%2f', # URL encoded double slash
r'%5c%5c', # URL encoded backslashes
r'<script', # Script tags
r'<iframe', # Iframe tags
r'<object', # Object tags
r'<embed', # Embed tags
r'<form', # Form tags
r'onload=', # Event handlers
r'onerror=', # Event handlers
r'onclick=', # Event handlers
]
path_lower = path.lower()
for pattern in suspicious_patterns:
if pattern in path_lower:
return True
return False
def validate_next_path(next_path: str) -> str:
"""Validates that next_path is a valid path and extracts only the path component."""
"""Validates that next_path is a safe relative path for redirection."""
# Browsers interpret backslashes as forward slashes. Remove all backslashes.
if not next_path or not isinstance(next_path, str):
return ""
# Limit input length to prevent DoS attacks
if len(next_path) > 500:
return ""
next_path = next_path.replace("\\", "")
parsed_url = urlparse(next_path)
# Ensure next_path is not an absolute URL
# Block absolute URLs or anything with scheme/netloc
if parsed_url.scheme or parsed_url.netloc:
next_path = parsed_url.path # Extract only the path component
# Ensure it starts with a forward slash (indicating a valid relative path)
if not next_path.startswith("/"):
# Must start with a forward slash and not be empty
if not next_path or not next_path.startswith("/"):
return ""
# Ensure it does not contain dangerous path traversal sequences
# Prevent path traversal
if ".." in next_path:
return ""
# Additional security checks
if _contains_suspicious_patterns(next_path):
return ""
return next_path
def get_safe_redirect_url(base_url: str, next_path: str = "", params: dict = {}) -> str:
    """
    Safely construct a redirect URL with a validated next_path.

    Args:
        base_url (str): The base URL to redirect to
        next_path (str): The candidate next path to append (validated first)
        params (dict): Extra query parameters to append; never mutated

    Returns:
        str: The safe redirect URL with an encoded query string
    """
    # Copy the parameters: the original code mutated `params` in place,
    # which both modified the caller's dict and leaked state across calls
    # through the shared mutable default argument.
    query = dict(params)
    # Validate the next path; drop it entirely if unsafe
    validated_path = validate_next_path(next_path)
    if validated_path:
        query["next_path"] = validated_path
    # Build the redirect URL with a properly encoded query string
    return f"{base_url.rstrip('/')}?{urlencode(query)}"

View File

@@ -1,7 +1,7 @@
# base requirements
# django
Django==4.2.22
Django==4.2.24
# rest framework
djangorestframework==3.15.2
# postgres
@@ -9,6 +9,8 @@ psycopg==3.1.18
psycopg-binary==3.1.18
psycopg-c==3.1.18
dj-database-url==2.1.0
# mongo
pymongo==4.6.3
# redis
redis==5.0.4
django-redis==5.4.0
@@ -66,4 +68,6 @@ opentelemetry-sdk==1.28.1
opentelemetry-instrumentation-django==0.49b1
opentelemetry-exporter-otlp==1.28.1
# OpenAPI Specification
drf-spectacular==0.28.0
drf-spectacular==0.28.0
# html sanitizer
nh3==0.2.18

4
apps/live/.eslintrc.cjs Normal file
View File

@@ -0,0 +1,4 @@
module.exports = {
root: true,
extends: ["@plane/eslint-config/server.js"],
};

View File

@@ -1,5 +0,0 @@
{
"root": true,
"extends": ["@plane/eslint-config/server.js"],
"parser": "@typescript-eslint/parser"
}

View File

@@ -1,14 +1,14 @@
{
"name": "live",
"version": "0.28.0",
"version": "1.0.0",
"license": "AGPL-3.0",
"description": "A realtime collaborative server powers Plane's rich text editor",
"main": "./src/server.ts",
"private": true,
"type": "module",
"scripts": {
"dev": "tsup --watch --onSuccess 'node --env-file=.env dist/server.js'",
"build": "tsc --noEmit && tsup",
"build": "tsdown",
"dev": "tsdown --watch",
"start": "node --env-file=.env dist/server.js",
"check:lint": "eslint . --max-warnings 10",
"check:types": "tsc --noEmit",
@@ -28,7 +28,7 @@
"@plane/types": "workspace:*",
"@tiptap/core": "^2.22.3",
"@tiptap/html": "^2.22.3",
"axios": "1.11.0",
"axios": "catalog:",
"compression": "1.8.1",
"cors": "^2.8.5",
"dotenv": "^16.4.5",
@@ -36,11 +36,11 @@
"express-ws": "^5.0.2",
"helmet": "^7.1.0",
"ioredis": "^5.4.1",
"lodash": "^4.17.21",
"lodash": "catalog:",
"morgan": "1.10.1",
"pino-http": "^10.3.0",
"pino-pretty": "^11.2.2",
"uuid": "^10.0.0",
"uuid": "catalog:",
"y-prosemirror": "^1.2.15",
"y-protocols": "^1.0.6",
"yjs": "^13.6.20"
@@ -58,8 +58,8 @@
"concurrently": "^9.0.1",
"nodemon": "^3.1.7",
"ts-node": "^10.9.2",
"tsup": "8.4.0",
"typescript": "5.8.3",
"tsdown": "catalog:",
"typescript": "catalog:",
"ws": "^8.18.3"
}
}

View File

@@ -1,20 +1,17 @@
// Third-party libraries
import { Redis } from "ioredis";
// Hocuspocus extensions and core
import { Database } from "@hocuspocus/extension-database";
import { Extension } from "@hocuspocus/server";
import { Logger } from "@hocuspocus/extension-logger";
import { Redis as HocusPocusRedis } from "@hocuspocus/extension-redis";
import { Extension } from "@hocuspocus/server";
import { Redis } from "ioredis";
// core helpers and utilities
import { manualLogger } from "@/core/helpers/logger.js";
import { getRedisUrl } from "@/core/lib/utils/redis-url.js";
// core libraries
import { fetchPageDescriptionBinary, updatePageDescription } from "@/core/lib/page.js";
import { getRedisUrl } from "@/core/lib/utils/redis-url.js";
import { type HocusPocusServerContext, type TDocumentTypes } from "@/core/types/common.js";
// plane live libraries
import { fetchDocument } from "@/plane-live/lib/fetch-document.js";
import { updateDocument } from "@/plane-live/lib/update-document.js";
// types
import { type HocusPocusServerContext, type TDocumentTypes } from "@/core/types/common.js";
export const getExtensions: () => Promise<Extension[]> = async () => {
const extensions: Extension[] = [

View File

@@ -1,12 +1,12 @@
import { Server } from "@hocuspocus/server";
import { v4 as uuidv4 } from "uuid";
// lib
import { handleAuthentication } from "@/core/lib/authentication.js";
// extensions
import { getExtensions } from "@/core/extensions/index.js";
import { DocumentCollaborativeEvents, TDocumentEventsServer } from "@plane/editor/lib";
// editor types
import { TUserDetails } from "@plane/editor";
import { DocumentCollaborativeEvents, TDocumentEventsServer } from "@plane/editor/lib";
// extensions
import { getExtensions } from "@/core/extensions/index.js";
// lib
import { handleAuthentication } from "@/core/lib/authentication.js";
// types
import { type HocusPocusServerContext } from "@/core/types/common.js";

View File

@@ -1,7 +1,7 @@
// services
import { UserService } from "@/core/services/user.service.js";
// core helpers
import { manualLogger } from "@/core/helpers/logger.js";
// services
import { UserService } from "@/core/services/user.service.js";
const userService = new UserService();

View File

@@ -14,6 +14,7 @@ export abstract class APIService {
this.axiosInstance = axios.create({
baseURL,
withCredentials: true,
timeout: 20000,
});
}

View File

@@ -1,13 +1,13 @@
import compression from "compression";
import cors from "cors";
import expressWs from "express-ws";
import express, { Request, Response } from "express";
import expressWs from "express-ws";
import helmet from "helmet";
// hocuspocus server
import { getHocusPocusServer } from "@/core/hocuspocus-server.js";
// helpers
import { convertHTMLDocumentToAllFormats } from "@/core/helpers/convert-document.js";
import { logger, manualLogger } from "@/core/helpers/logger.js";
import { getHocusPocusServer } from "@/core/hocuspocus-server.js";
// types
import { TConvertDocumentRequestBody } from "@/core/types/common.js";

View File

@@ -21,6 +21,6 @@
"emitDecoratorMetadata": true,
"sourceRoot": "/"
},
"include": ["src/**/*.ts", "tsup.config.ts"],
"include": ["src/**/*.ts", "tsdown.config.ts"],
"exclude": ["./dist", "./build", "./node_modules"]
}

View File

@@ -0,0 +1,7 @@
import { defineConfig } from "tsdown";
export default defineConfig({
entry: ["src/server.ts"],
outDir: "dist",
format: ["esm", "cjs"],
});

View File

@@ -1,15 +0,0 @@
import { defineConfig } from "tsup";
export default defineConfig({
entry: ["src/server.ts"],
format: ["esm", "cjs"],
dts: true,
splitting: false,
sourcemap: true,
minify: false,
target: "node18",
outDir: "dist",
env: {
NODE_ENV: process.env.NODE_ENV || "development",
},
});

12
apps/space/.eslintignore Normal file
View File

@@ -0,0 +1,12 @@
.next/*
out/*
public/*
dist/*
node_modules/*
.turbo/*
.env*
.env
.env.local
.env.development
.env.production
.env.test

Some files were not shown because too many files have changed in this diff Show More