fix: merge conflicts resolved

sriram veeraghanta
2024-05-28 15:41:27 +05:30
182 changed files with 8693 additions and 599 deletions


@@ -31,3 +31,5 @@ USE_MINIO=1
# Nginx Configuration
NGINX_PORT=80
MONGO_DB_URL="mongodb://plane-mongodb:27017/"

.github/workflows/build-branch-ee.yml (new file, 467 lines)

@@ -0,0 +1,467 @@
name: Branch Build Enterprise
on:
workflow_dispatch:
inputs:
arm64:
description: "Build for ARM64 architecture"
required: false
default: false
type: boolean
push:
branches:
- master
- preview
- develop
release:
types: [released, prereleased]
env:
TARGET_BRANCH: ${{ github.ref_name || github.event.release.target_commitish }}
ARM64_BUILD: ${{ github.event.inputs.arm64 }}
jobs:
branch_build_setup:
name: Build Setup
runs-on: ${{ vars.ACTION_RUNS_ON }}
outputs:
gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
build_web: ${{ steps.changed_files.outputs.web_any_changed }}
build_admin: ${{ steps.changed_files.outputs.admin_any_changed }}
build_space: ${{ steps.changed_files.outputs.space_any_changed }}
build_apiserver: ${{ steps.changed_files.outputs.apiserver_any_changed }}
build_proxy: ${{ steps.changed_files.outputs.proxy_any_changed }}
artifact_upload_to_s3: ${{ steps.set_env_variables.outputs.artifact_upload_to_s3 }}
artifact_s3_suffix: ${{ steps.set_env_variables.outputs.artifact_s3_suffix }}
steps:
- id: set_env_variables
name: Set Environment Variables
run: |
if [ "${{ env.TARGET_BRANCH }}" == "master" ] || [ "${{ github.event_name }}" == "release" ] || [ "${{ env.ARM64_BUILD }}" == "true" ] ; then
echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT
else
echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
fi
BR_NAME=$( echo "${{ env.TARGET_BRANCH }}" | tr / -)
echo "TARGET_BRANCH=$BR_NAME" >> $GITHUB_OUTPUT
if [ "${{ github.event_name }}" == "release" ]; then
echo "artifact_upload_to_s3=true" >> $GITHUB_OUTPUT
echo "artifact_s3_suffix=${{ github.event.release.tag_name }}" >> $GITHUB_OUTPUT
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
echo "artifact_upload_to_s3=true" >> $GITHUB_OUTPUT
echo "artifact_s3_suffix=latest" >> $GITHUB_OUTPUT
elif [ "${{ env.TARGET_BRANCH }}" == "preview" ] || [ "${{ env.TARGET_BRANCH }}" == "develop" ]; then
echo "artifact_upload_to_s3=true" >> $GITHUB_OUTPUT
echo "artifact_s3_suffix=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT
else
echo "artifact_upload_to_s3=false" >> $GITHUB_OUTPUT
echo "artifact_s3_suffix=$BR_NAME" >> $GITHUB_OUTPUT
fi
- id: checkout_files
name: Checkout Files
uses: actions/checkout@v4
- name: Get changed files
id: changed_files
uses: tj-actions/changed-files@v42
with:
files_yaml: |
apiserver:
- apiserver/**
proxy:
- nginx/**
admin:
- admin/**
- packages/**
- "package.json"
- "yarn.lock"
- "tsconfig.json"
- "turbo.json"
space:
- space/**
- packages/**
- "package.json"
- "yarn.lock"
- "tsconfig.json"
- "turbo.json"
web:
- web/**
- packages/**
- "package.json"
- "yarn.lock"
- "tsconfig.json"
- "turbo.json"
branch_build_push_admin:
if: ${{ needs.branch_build_setup.outputs.build_admin == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
name: Build-Push Admin Docker Image
runs-on: ${{ vars.ACTION_RUNS_ON }}
needs: [branch_build_setup]
env:
ADMIN_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/admin-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
steps:
- name: Set Admin Docker Tag
run: |
if [ "${{ github.event_name }}" == "release" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/admin-enterprise:stable
TAG=${TAG},${{ secrets.DOCKERHUB_USERNAME }}/admin-enterprise:${{ github.event.release.tag_name }}
TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/admin-enterprise:stable
TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/admin-enterprise:${{ github.event.release.tag_name }}
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/admin-enterprise:latest
TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/admin-enterprise:latest
else
TAG=${{ env.ADMIN_TAG }}
TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/admin-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
fi
echo "ADMIN_TAG=${TAG}" >> $GITHUB_ENV
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to Harbor
uses: docker/login-action@v3
with:
username: ${{ secrets.HARBOR_USERNAME }}
password: ${{ secrets.HARBOR_TOKEN }}
registry: ${{ vars.HARBOR_REGISTRY }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver: ${{ env.BUILDX_DRIVER }}
version: ${{ env.BUILDX_VERSION }}
endpoint: ${{ env.BUILDX_ENDPOINT }}
- name: Check out the repo
uses: actions/checkout@v4
- name: Build and Push Admin to Docker Container Registry
uses: docker/build-push-action@v5.1.0
with:
context: .
file: ./admin/Dockerfile.admin
platforms: ${{ env.BUILDX_PLATFORMS }}
tags: ${{ env.ADMIN_TAG }}
push: true
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
branch_build_push_web:
if: ${{ needs.branch_build_setup.outputs.build_web == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
name: Build-Push Web Docker Image
runs-on: ${{ vars.ACTION_RUNS_ON }}
needs: [branch_build_setup]
env:
WEB_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/web-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
steps:
- name: Set Web Docker Tag
run: |
if [ "${{ github.event_name }}" == "release" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/web-enterprise:stable
TAG=${TAG},${{ secrets.DOCKERHUB_USERNAME }}/web-enterprise:${{ github.event.release.tag_name }}
TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/web-enterprise:stable
TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/web-enterprise:${{ github.event.release.tag_name }}
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/web-enterprise:latest
TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/web-enterprise:latest
else
TAG=${{ env.WEB_TAG }}
TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/web-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
fi
echo "WEB_TAG=${TAG}" >> $GITHUB_ENV
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to Harbor
uses: docker/login-action@v3
with:
username: ${{ secrets.HARBOR_USERNAME }}
password: ${{ secrets.HARBOR_TOKEN }}
registry: ${{ vars.HARBOR_REGISTRY }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver: ${{ env.BUILDX_DRIVER }}
version: ${{ env.BUILDX_VERSION }}
endpoint: ${{ env.BUILDX_ENDPOINT }}
- name: Check out the repo
uses: actions/checkout@v4
- name: Build and Push Web to Docker Container Registry
uses: docker/build-push-action@v5.1.0
with:
context: .
file: ./web/Dockerfile.web
platforms: ${{ env.BUILDX_PLATFORMS }}
tags: ${{ env.WEB_TAG }}
push: true
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
branch_build_push_space:
if: ${{ needs.branch_build_setup.outputs.build_space == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
name: Build-Push Space Docker Image
runs-on: ${{ vars.ACTION_RUNS_ON }}
needs: [branch_build_setup]
env:
SPACE_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/space-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
steps:
- name: Set Space Docker Tag
run: |
if [ "${{ github.event_name }}" == "release" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/space-enterprise:stable
TAG=${TAG},${{ secrets.DOCKERHUB_USERNAME }}/space-enterprise:${{ github.event.release.tag_name }}
TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/space-enterprise:stable
TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/space-enterprise:${{ github.event.release.tag_name }}
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/space-enterprise:latest
TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/space-enterprise:latest
else
TAG=${{ env.SPACE_TAG }}
TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/space-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
fi
echo "SPACE_TAG=${TAG}" >> $GITHUB_ENV
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to Harbor
uses: docker/login-action@v3
with:
username: ${{ secrets.HARBOR_USERNAME }}
password: ${{ secrets.HARBOR_TOKEN }}
registry: ${{ vars.HARBOR_REGISTRY }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver: ${{ env.BUILDX_DRIVER }}
version: ${{ env.BUILDX_VERSION }}
endpoint: ${{ env.BUILDX_ENDPOINT }}
- name: Check out the repo
uses: actions/checkout@v4
- name: Build and Push Space to Docker Hub
uses: docker/build-push-action@v5.1.0
with:
context: .
file: ./space/Dockerfile.space
platforms: ${{ env.BUILDX_PLATFORMS }}
tags: ${{ env.SPACE_TAG }}
push: true
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
branch_build_push_apiserver:
if: ${{ needs.branch_build_setup.outputs.build_apiserver == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
name: Build-Push API Server Docker Image
runs-on: ${{ vars.ACTION_RUNS_ON }}
needs: [branch_build_setup]
env:
BACKEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/backend-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
steps:
- name: Set Backend Docker Tag
run: |
if [ "${{ github.event_name }}" == "release" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/backend-enterprise:stable
TAG=${TAG},${{ secrets.DOCKERHUB_USERNAME }}/backend-enterprise:${{ github.event.release.tag_name }}
TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/backend-enterprise:stable
TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/backend-enterprise:${{ github.event.release.tag_name }}
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/backend-enterprise:latest
TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/backend-enterprise:latest
else
TAG=${{ env.BACKEND_TAG }}
TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/backend-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
fi
echo "BACKEND_TAG=${TAG}" >> $GITHUB_ENV
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to Harbor
uses: docker/login-action@v3
with:
username: ${{ secrets.HARBOR_USERNAME }}
password: ${{ secrets.HARBOR_TOKEN }}
registry: ${{ vars.HARBOR_REGISTRY }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver: ${{ env.BUILDX_DRIVER }}
version: ${{ env.BUILDX_VERSION }}
endpoint: ${{ env.BUILDX_ENDPOINT }}
- name: Check out the repo
uses: actions/checkout@v4
- name: Build and Push Backend to Docker Hub
uses: docker/build-push-action@v5.1.0
with:
context: ./apiserver
file: ./apiserver/Dockerfile.api
platforms: ${{ env.BUILDX_PLATFORMS }}
push: true
tags: ${{ env.BACKEND_TAG }}
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
branch_build_push_proxy:
if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
name: Build-Push Proxy Docker Image
runs-on: ${{ vars.ACTION_RUNS_ON }}
needs: [branch_build_setup]
env:
PROXY_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/proxy-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
steps:
- name: Set Proxy Docker Tag
run: |
if [ "${{ github.event_name }}" == "release" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/proxy-enterprise:stable
TAG=${TAG},${{ secrets.DOCKERHUB_USERNAME }}/proxy-enterprise:${{ github.event.release.tag_name }}
TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/proxy-enterprise:stable
TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/proxy-enterprise:${{ github.event.release.tag_name }}
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/proxy-enterprise:latest
TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/proxy-enterprise:latest
else
TAG=${{ env.PROXY_TAG }}
TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/proxy-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
fi
echo "PROXY_TAG=${TAG}" >> $GITHUB_ENV
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to Harbor
uses: docker/login-action@v3
with:
username: ${{ secrets.HARBOR_USERNAME }}
password: ${{ secrets.HARBOR_TOKEN }}
registry: ${{ vars.HARBOR_REGISTRY }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver: ${{ env.BUILDX_DRIVER }}
version: ${{ env.BUILDX_VERSION }}
endpoint: ${{ env.BUILDX_ENDPOINT }}
- name: Check out the repo
uses: actions/checkout@v4
- name: Build and Push Plane-Proxy to Docker Hub
uses: docker/build-push-action@v5.1.0
with:
context: ./nginx
file: ./nginx/Dockerfile
platforms: ${{ env.BUILDX_PLATFORMS }}
tags: ${{ env.PROXY_TAG }}
push: true
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
upload_artifacts_s3:
if: ${{ needs.branch_build_setup.outputs.artifact_upload_to_s3 == 'true' }}
name: Upload artifacts to S3 Bucket
runs-on: ${{ vars.ACTION_RUNS_ON }}
needs: [branch_build_setup]
container:
image: docker:20.10.7
credentials:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
env:
ARTIFACT_SUFFIX: ${{ needs.branch_build_setup.outputs.artifact_s3_suffix }}
AWS_ACCESS_KEY_ID: ${{ secrets.SELF_HOST_BUCKET_ACCESS_KEY }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.SELF_HOST_BUCKET_SECRET_KEY }}
TARGET_BRANCH: ${{ github.ref_name || github.event.release.target_commitish }}
steps:
- id: checkout_files
name: Checkout Files
uses: actions/checkout@v4
- name: Upload artifacts
run: |
apk update
apk add --no-cache aws-cli
mkdir -p ~/${{ env.ARTIFACT_SUFFIX }}
cp deploy/cli-install/variables.env ~/${{ env.ARTIFACT_SUFFIX }}/variables.env
cp deploy/cli-install/Caddyfile ~/${{ env.ARTIFACT_SUFFIX }}/Caddyfile
sed -e 's@${APP_RELEASE_VERSION}@'${{ env.ARTIFACT_SUFFIX }}'@' deploy/cli-install/docker-compose.yml > ~/${{ env.ARTIFACT_SUFFIX }}/docker-compose.yml
sed -e 's@${APP_RELEASE_VERSION}@'${{ env.ARTIFACT_SUFFIX }}'@' deploy/cli-install/docker-compose-caddy.yml > ~/${{ env.ARTIFACT_SUFFIX }}/docker-compose-caddy.yml
aws s3 cp ~/${{ env.ARTIFACT_SUFFIX }} s3://${{ vars.SELF_HOST_BUCKET_NAME }}/plane-enterprise/${{ env.ARTIFACT_SUFFIX }} --recursive
rm -rf ~/${{ env.ARTIFACT_SUFFIX }}

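The setup job above centralizes the decisions every downstream build job consumes: which Buildx driver and platform list to use, and which S3 suffix to publish artifacts under. A minimal Python sketch of the driver selection logic, with an illustrative function name that is not part of the workflow:

def resolve_buildx_config(event_name: str, target_branch: str, arm64_build: bool) -> dict:
    """Mirror the set_env_variables step: multi-arch cloud builder for master,
    releases, and explicit ARM64 runs; single-arch local builder otherwise."""
    if target_branch == "master" or event_name == "release" or arm64_build:
        return {
            "driver": "cloud",
            "version": "lab:latest",
            "platforms": "linux/amd64,linux/arm64",
            "endpoint": "makeplane/plane-dev",
        }
    return {
        "driver": "docker-container",
        "version": "latest",
        "platforms": "linux/amd64",
        "endpoint": "",
    }

# Example: a push to a feature branch builds amd64 only.
assert resolve_buildx_config("push", "feat-oidc", False)["platforms"] == "linux/amd64"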
.github/workflows/create-release.yml (new file, 70 lines)

@@ -0,0 +1,70 @@
name: Manual Release Workflow
on:
workflow_dispatch:
inputs:
release_tag:
description: 'Release Tag (e.g., v0.16-canary-1)'
required: true
prerelease:
description: 'Pre-Release'
required: true
default: true
type: boolean
draft:
description: 'Draft'
required: true
default: true
type: boolean
permissions:
contents: write
jobs:
create-release:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
with:
fetch-depth: 0 # Necessary to fetch all history for tags
- name: Set up Git
run: |
git config user.name "github-actions"
git config user.email "github-actions@github.com"
- name: Check for the Prerelease
run: |
echo "${{ github.event.inputs.prerelease }}"
- name: Generate Release Notes
id: generate_notes
run: |
bash ./generate_release_notes.sh
# Directly use the content of RELEASE_NOTES.md for the release body
RELEASE_NOTES=$(cat RELEASE_NOTES.md)
echo "RELEASE_NOTES<<EOF" >> $GITHUB_ENV
echo "$RELEASE_NOTES" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV
- name: Create Tag
run: |
git tag ${{ github.event.inputs.release_tag }}
git push origin ${{ github.event.inputs.release_tag }}
- name: Create GitHub Release
uses: softprops/action-gh-release@v1
with:
tag_name: ${{ github.event.inputs.release_tag }}
body_path: RELEASE_NOTES.md
draft: ${{ github.event.inputs.draft }}
prerelease: ${{ github.event.inputs.prerelease }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

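The release-notes step writes a multi-line value into $GITHUB_ENV using the NAME<<EOF heredoc form. A hedged Python equivalent of that write (the helper name is ours; a randomized delimiter guards against notes that themselves contain the literal EOF):

import os
import uuid

def set_multiline_env(name: str, value: str) -> None:
    """Append a multi-line variable to the GITHUB_ENV file with a unique heredoc delimiter."""
    delimiter = f"GH_EOF_{uuid.uuid4().hex}"
    with open(os.environ["GITHUB_ENV"], "a", encoding="utf-8") as fh:
        fh.write(f"{name}<<{delimiter}\n{value}\n{delimiter}\n")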

@@ -3,3 +3,7 @@ export * from "./password-config-switch";
export * from "./authentication-method-card";
export * from "./github-config";
export * from "./google-config";
// enterprise
export * from "./oidc-config";
export * from "./saml-config";


@@ -0,0 +1,72 @@
"use client";
import React from "react";
import { observer } from "mobx-react-lite";
import Link from "next/link";
// icons
import { Settings2 } from "lucide-react";
// types
import { TInstanceEnterpriseAuthenticationMethodKeys } from "@plane/types";
// ui
import { ToggleSwitch, getButtonStyling } from "@plane/ui";
// helpers
import { cn } from "@/helpers/common.helper";
// hooks
import { useInstance } from "@/hooks/store";
type Props = {
disabled: boolean;
updateConfig: (
key: TInstanceEnterpriseAuthenticationMethodKeys,
value: string
) => void;
};
export const OIDCConfiguration: React.FC<Props> = observer((props) => {
const { disabled, updateConfig } = props;
// store
const { formattedConfig } = useInstance();
// derived values
const enableOIDCConfig = formattedConfig?.IS_OIDC_ENABLED ?? "";
const isOIDCConfigured =
!!formattedConfig?.OIDC_CLIENT_ID && !!formattedConfig?.OIDC_CLIENT_SECRET;
return (
<>
{isOIDCConfigured ? (
<div className="flex items-center gap-4">
<Link
href="/authentication/oidc"
className={cn(
getButtonStyling("link-primary", "md"),
"font-medium"
)}
>
Edit
</Link>
<ToggleSwitch
value={Boolean(parseInt(enableOIDCConfig))}
onChange={() => updateConfig("IS_OIDC_ENABLED", Boolean(parseInt(enableOIDCConfig)) ? "0" : "1")}
size="sm"
disabled={disabled}
/>
</div>
) : (
<Link
href="/authentication/oidc"
className={cn(
getButtonStyling("neutral-primary", "sm"),
"text-custom-text-300"
)}
>
<Settings2 className="h-4 w-4 p-0.5 text-custom-text-300/80" />
Configure
</Link>
)}
</>
);
});

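Both this component and the SAML twin below rely on the convention that instance configuration values travel as strings, so an on/off flag round-trips as "1" or "0" and is read back with Boolean(parseInt(...)). A hedged Python sketch of the same convention as a backend might read it (the helper name is an assumption, not from this codebase):

def is_flag_enabled(value: str | None) -> bool:
    """Interpret an instance configuration flag stored as the string "0" or "1"."""
    try:
        return bool(int(value or "0"))
    except (TypeError, ValueError):
        return False

assert is_flag_enabled("1") and not is_flag_enabled("0") and not is_flag_enabled(None)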

@@ -0,0 +1,72 @@
"use client";
import React from "react";
import { observer } from "mobx-react-lite";
import Link from "next/link";
// icons
import { Settings2 } from "lucide-react";
// types
import { TInstanceEnterpriseAuthenticationMethodKeys } from "@plane/types";
// ui
import { ToggleSwitch, getButtonStyling } from "@plane/ui";
// helpers
import { cn } from "@/helpers/common.helper";
// hooks
import { useInstance } from "@/hooks/store";
type Props = {
disabled: boolean;
updateConfig: (
key: TInstanceEnterpriseAuthenticationMethodKeys,
value: string
) => void;
};
export const SAMLConfiguration: React.FC<Props> = observer((props) => {
const { disabled, updateConfig } = props;
// store
const { formattedConfig } = useInstance();
// derived values
const enableSAMLConfig = formattedConfig?.IS_SAML_ENABLED ?? "";
const isSAMLConfigured =
!!formattedConfig?.SAML_ENTITY_ID && !!formattedConfig?.SAML_CERTIFICATE;
return (
<>
{isSAMLConfigured ? (
<div className="flex items-center gap-4">
<Link
href="/authentication/saml"
className={cn(
getButtonStyling("link-primary", "md"),
"font-medium"
)}
>
Edit
</Link>
<ToggleSwitch
value={Boolean(parseInt(enableSAMLConfig))}
onChange={() => updateConfig("IS_SAML_ENABLED", Boolean(parseInt(enableSAMLConfig)) ? "0" : "1")}
size="sm"
disabled={disabled}
/>
</div>
) : (
<Link
href="/authentication/saml"
className={cn(
getButtonStyling("neutral-primary", "sm"),
"text-custom-text-300"
)}
>
<Settings2 className="h-4 w-4 p-0.5 text-custom-text-300/80" />
Configure
</Link>
)}
</>
);
});


@@ -0,0 +1,222 @@
import { FC, useState } from "react";
import Link from "next/link";
import { useForm } from "react-hook-form";
// types
import { IFormattedInstanceConfiguration, TInstanceOIDCAuthenticationConfigurationKeys } from "@plane/types";
// ui
import { Button, TOAST_TYPE, getButtonStyling, setToast } from "@plane/ui";
// components
import {
ConfirmDiscardModal,
ControllerInput,
TControllerInputFormField,
CopyField,
TCopyField,
} from "@/components/common";
// helpers
import { cn } from "@/helpers/common.helper";
// hooks
import { useInstance } from "@/hooks/store";
type Props = {
config: IFormattedInstanceConfiguration;
};
type OIDCConfigFormValues = Record<TInstanceOIDCAuthenticationConfigurationKeys, string>;
export const InstanceOIDCConfigForm: FC<Props> = (props) => {
const { config } = props;
// states
const [isDiscardChangesModalOpen, setIsDiscardChangesModalOpen] = useState(false);
// store hooks
const { updateInstanceConfigurations } = useInstance();
// form data
const {
handleSubmit,
control,
reset,
formState: { errors, isDirty, isSubmitting },
} = useForm<OIDCConfigFormValues>({
defaultValues: {
OIDC_CLIENT_ID: config["OIDC_CLIENT_ID"],
OIDC_CLIENT_SECRET: config["OIDC_CLIENT_SECRET"],
OIDC_TOKEN_URL: config["OIDC_TOKEN_URL"],
OIDC_USERINFO_URL: config["OIDC_USERINFO_URL"],
OIDC_AUTHORIZE_URL: config["OIDC_AUTHORIZE_URL"],
OIDC_LOGOUT_URL: config["OIDC_LOGOUT_URL"],
OIDC_PROVIDER_NAME: config["OIDC_PROVIDER_NAME"],
},
});
const originURL = typeof window !== "undefined" ? window.location.origin : "";
const OIDC_FORM_FIELDS: TControllerInputFormField[] = [
{
key: "OIDC_CLIENT_ID",
type: "text",
label: "Client ID",
description: "Your authentication provider's public identifier for the client.",
placeholder: "abc123xyz789",
error: Boolean(errors.OIDC_CLIENT_ID),
required: true,
},
{
key: "OIDC_CLIENT_SECRET",
type: "password",
label: "Client secret",
description: "Secret key provided by your authentication provider for the client.",
placeholder: "s3cr3tK3y123!",
error: Boolean(errors.OIDC_CLIENT_SECRET),
required: true,
},
{
key: "OIDC_AUTHORIZE_URL",
type: "text",
label: "Authorize URL",
description: "The URL for interacting with the resource owner to obtain an authorization grant.",
placeholder: "https://example.com/",
error: Boolean(errors.OIDC_AUTHORIZE_URL),
required: true,
},
{
key: "OIDC_TOKEN_URL",
type: "text",
label: "Token URL",
description: "URL to fetch the access token from a grant or refresh token.",
placeholder: "https://example.com/oauth/token",
error: Boolean(errors.OIDC_TOKEN_URL),
required: true,
},
{
key: "OIDC_USERINFO_URL",
type: "text",
label: "UserInfo URL",
description: "The URL to fetch user claims and information.",
placeholder: "https://example.com/userinfo",
error: Boolean(errors.OIDC_USERINFO_URL),
required: true,
},
{
key: "OIDC_LOGOUT_URL",
type: "text",
label: "Logout URL",
description: "Add your OIDC logout URL here for seamless session management.",
placeholder: "https://example.com/logout",
error: Boolean(errors.OIDC_LOGOUT_URL),
required: false,
},
{
key: "OIDC_PROVIDER_NAME",
type: "text",
label: "Identity provider name",
description: "This name will be shown on sign in and create account CTA buttons.",
placeholder: "Okta",
error: Boolean(errors.OIDC_PROVIDER_NAME),
required: false,
},
];
const OIDC_SERVICE_DETAILS: TCopyField[] = [
{
key: "Origin_URI",
label: "Origin URI",
url: `${originURL}/auth/oidc/`,
description: "We will auto-generate this. Add this as a trusted origin in your identity provider.",
},
{
key: "Callback_URI",
label: "Callback URI",
url: `${originURL}/auth/oidc/callback/`,
description:
"We will auto generate this. Paste this in the sign-in redirect URI section in your identity provider.",
},
{
key: "Logout_URI",
label: "Logout URI",
url: `${originURL}/auth/oidc/logout/`,
description: "We will auto-generate this. Paste this in sign-out redirect URI in your identity provider",
},
];
const onSubmit = async (formData: OIDCConfigFormValues) => {
const payload: Partial<OIDCConfigFormValues> = { ...formData };
await updateInstanceConfigurations(payload)
.then((response = []) => {
setToast({
type: TOAST_TYPE.SUCCESS,
title: "Success",
message: "OIDC Configuration Settings updated successfully",
});
reset({
OIDC_CLIENT_ID: response.find((item) => item.key === "OIDC_CLIENT_ID")?.value,
OIDC_CLIENT_SECRET: response.find((item) => item.key === "OIDC_CLIENT_SECRET")?.value,
OIDC_AUTHORIZE_URL: response.find((item) => item.key === "OIDC_AUTHORIZE_URL")?.value,
OIDC_TOKEN_URL: response.find((item) => item.key === "OIDC_TOKEN_URL")?.value,
OIDC_USERINFO_URL: response.find((item) => item.key === "OIDC_USERINFO_URL")?.value,
OIDC_LOGOUT_URL: response.find((item) => item.key === "OIDC_LOGOUT_URL")?.value,
OIDC_PROVIDER_NAME: response.find((item) => item.key === "OIDC_PROVIDER_NAME")?.value,
});
})
.catch((err) => console.error(err));
};
const handleGoBack = (e: React.MouseEvent<HTMLAnchorElement, MouseEvent>) => {
if (isDirty) {
e.preventDefault();
setIsDiscardChangesModalOpen(true);
}
};
return (
<>
<ConfirmDiscardModal
isOpen={isDiscardChangesModalOpen}
onDiscardHref="/authentication"
handleClose={() => setIsDiscardChangesModalOpen(false)}
/>
<div className="flex flex-col gap-8">
<div className="grid grid-cols-2 gap-x-12 gap-y-8 w-full">
<div className="flex flex-col gap-y-4 col-span-2 md:col-span-1">
<div className="pt-2 text-xl font-medium">Configuration</div>
{OIDC_FORM_FIELDS.map((field) => (
<ControllerInput
key={field.key}
control={control}
type={field.type}
name={field.key}
label={field.label}
description={field.description}
placeholder={field.placeholder}
error={field.error}
required={field.required}
/>
))}
<div className="flex flex-col gap-1 pt-4">
<div className="flex items-center gap-4">
<Button variant="primary" onClick={handleSubmit(onSubmit)} loading={isSubmitting} disabled={!isDirty}>
{isSubmitting ? "Saving..." : "Save changes"}
</Button>
<Link
href="/authentication"
className={cn(getButtonStyling("link-neutral", "md"), "font-medium")}
onClick={handleGoBack}
>
Go back
</Link>
</div>
</div>
</div>
<div className="col-span-2 md:col-span-1">
<div className="flex flex-col gap-y-4 px-6 py-4 my-2 bg-custom-background-80/60 rounded-lg">
<div className="pt-2 text-xl font-medium">Service provider details</div>
{OIDC_SERVICE_DETAILS.map((field) => (
<CopyField key={field.key} label={field.label} url={field.url} description={field.description} />
))}
</div>
</div>
</div>
</div>
</>
);
};


@@ -0,0 +1,102 @@
"use client";
import { useState } from "react";
import Image from "next/image";
import { observer } from "mobx-react-lite";
import useSWR from "swr";
// hooks
import { useInstance } from "@/hooks/store";
// ui
import { Loader, ToggleSwitch, setPromiseToast } from "@plane/ui";
// components
import { PageHeader } from "@/components/core";
import { AuthenticationMethodCard } from "../components";
import { InstanceOIDCConfigForm } from "./form";
// icons
import OIDCLogo from "/public/logos/oidc-logo.png";
const InstanceOIDCAuthenticationPage = observer(() => {
// store
const { fetchInstanceConfigurations, formattedConfig, updateInstanceConfigurations } = useInstance();
// state
const [isSubmitting, setIsSubmitting] = useState<boolean>(false);
// config
const enableOIDCConfig = formattedConfig?.IS_OIDC_ENABLED ?? "";
useSWR("INSTANCE_CONFIGURATIONS", () => fetchInstanceConfigurations());
const updateConfig = async (key: "IS_OIDC_ENABLED", value: string) => {
setIsSubmitting(true);
const payload = {
[key]: value,
};
const updateConfigPromise = updateInstanceConfigurations(payload);
setPromiseToast(updateConfigPromise, {
loading: "Saving Configuration...",
success: {
title: "Configuration saved",
message: () => `OIDC authentication is now ${value === "1" ? "active" : "disabled"}.`,
},
error: {
title: "Error",
message: () => "Failed to save configuration",
},
});
await updateConfigPromise
.then(() => {
setIsSubmitting(false);
})
.catch((err) => {
console.error(err);
setIsSubmitting(false);
});
};
return (
<>
<PageHeader title="Authentication - God Mode" />
<div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
<div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
<AuthenticationMethodCard
name="OIDC"
description="Authenticate your users via the OpenID connect protocol."
icon={<Image src={OIDCLogo} height={24} width={24} alt="OIDC Logo" />}
config={
<ToggleSwitch
value={Boolean(parseInt(enableOIDCConfig))}
onChange={() => updateConfig("IS_OIDC_ENABLED", Boolean(parseInt(enableOIDCConfig)) ? "0" : "1")}
size="sm"
disabled={isSubmitting || !formattedConfig}
/>
}
disabled={isSubmitting || !formattedConfig}
withBorder={false}
/>
</div>
<div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md p-4">
{formattedConfig ? (
<InstanceOIDCConfigForm config={formattedConfig} />
) : (
<Loader className="space-y-8">
<Loader.Item height="50px" width="25%" />
<Loader.Item height="50px" />
<Loader.Item height="50px" />
<Loader.Item height="50px" />
<Loader.Item height="50px" />
<Loader.Item height="50px" width="50%" />
</Loader>
)}
</div>
</div>
</>
);
});
export default InstanceOIDCAuthenticationPage;


@@ -10,14 +10,17 @@ import { TInstanceConfigurationKeys } from "@plane/types";
import { Loader, setPromiseToast } from "@plane/ui";
// components
import { PageHeader } from "@/components/core";
// hooks
// helpers
import { resolveGeneralTheme } from "@/helpers/common.helper";
// hooks
import { useInstance } from "@/hooks/store";
// images
import githubLightModeImage from "@/public/logos/github-black.png";
import githubDarkModeImage from "@/public/logos/github-white.png";
import GoogleLogo from "@/public/logos/google-logo.svg";
// images - enterprise
import OIDCLogo from "@/public/logos/oidc-logo.png";
import SAMLLogo from "@/public/logos/saml-logo.svg";
// local components
import {
AuthenticationMethodCard,
@@ -25,6 +28,9 @@ import {
PasswordLoginConfiguration,
GithubConfiguration,
GoogleConfiguration,
// enterprise
OIDCConfiguration,
SAMLConfiguration,
} from "./components";
type TInstanceAuthenticationMethodCard = {
@@ -116,6 +122,24 @@ const InstanceAuthenticationPage = observer(() => {
},
];
// Enterprise authentication methods
authenticationMethodsCard.push(
{
key: "oidc",
name: "OIDC",
description: "Authenticate your users via the OpenID connect protocol.",
icon: <Image src={OIDCLogo} height={20} width={20} alt="OIDC Logo" />,
config: <OIDCConfiguration disabled={isSubmitting} updateConfig={updateConfig} />,
},
{
key: "saml",
name: "SAML",
description: "Authenticate your users via Security Assertion Markup Language protocol.",
icon: <Image src={SAMLLogo} height={24} width={24} alt="SAML Logo" className="pb-0.5 pl-0.5" />,
config: <SAMLConfiguration disabled={isSubmitting} updateConfig={updateConfig} />,
}
);
return (
<>
<PageHeader title="Authentication - God Mode" />


@@ -0,0 +1,226 @@
import { FC, useState } from "react";
import Link from "next/link";
import { Controller, useForm } from "react-hook-form";
// types
import { IFormattedInstanceConfiguration, TInstanceSAMLAuthenticationConfigurationKeys } from "@plane/types";
// ui
import { Button, TOAST_TYPE, TextArea, getButtonStyling, setToast } from "@plane/ui";
// components
import {
ConfirmDiscardModal,
ControllerInput,
TControllerInputFormField,
CopyField,
TCopyField,
} from "@/components/common";
// helpers
import { cn } from "@/helpers/common.helper";
// hooks
import { useInstance } from "@/hooks/store";
type Props = {
config: IFormattedInstanceConfiguration;
};
type SAMLConfigFormValues = Record<TInstanceSAMLAuthenticationConfigurationKeys, string>;
export const InstanceSAMLConfigForm: FC<Props> = (props) => {
const { config } = props;
// states
const [isDiscardChangesModalOpen, setIsDiscardChangesModalOpen] = useState(false);
// store hooks
const { updateInstanceConfigurations } = useInstance();
// form data
const {
handleSubmit,
control,
reset,
formState: { errors, isDirty, isSubmitting },
} = useForm<SAMLConfigFormValues>({
defaultValues: {
SAML_ENTITY_ID: config["SAML_ENTITY_ID"],
SAML_SSO_URL: config["SAML_SSO_URL"],
SAML_LOGOUT_URL: config["SAML_LOGOUT_URL"],
SAML_CERTIFICATE: config["SAML_CERTIFICATE"],
SAML_PROVIDER_NAME: config["SAML_PROVIDER_NAME"],
},
});
const originURL = typeof window !== "undefined" ? window.location.origin : "";
const SAML_FORM_FIELDS: TControllerInputFormField[] = [
{
key: "SAML_ENTITY_ID",
type: "text",
label: "Entity ID",
description: "Unique identifier for your Identity Provider (IdP) entity.",
placeholder: "70a44354520df8bd9bcd",
error: Boolean(errors.SAML_ENTITY_ID),
required: true,
},
{
key: "SAML_SSO_URL",
type: "text",
label: "SSO URL",
description: "URL used for Single Sign-On (SSO) with your Identity Provider (IdP).",
placeholder: "https://example.com/sso",
error: Boolean(errors.SAML_SSO_URL),
required: true,
},
{
key: "SAML_LOGOUT_URL",
type: "text",
label: "Logout URL",
description: "Add your SAML logout URL here for seamless session management.",
placeholder: "https://example.com/logout",
error: Boolean(errors.SAML_LOGOUT_URL),
required: false,
},
{
key: "SAML_PROVIDER_NAME",
type: "text",
label: "Identity provider name",
description: "This name will be shown on sign in and create account CTA buttons.",
placeholder: "Okta",
error: Boolean(errors.SAML_PROVIDER_NAME),
required: false,
},
];
const SAML_SERVICE_DETAILS: TCopyField[] = [
{
key: "Metadata_Information",
label: "Entity ID / Audience / Metadata Information",
url: `${originURL}/auth/saml/metadata/`,
description:
"This contains the link to the metadata information. We will auto-generate this.",
},
{
key: "Callback_URI",
label: "Callback URI",
url: `${originURL}/auth/saml/callback/`,
description:
"This url is a http-post request. Paste this in the single sign-on callback url section of your identity.",
},
{
key: "Logout_URI",
label: "Logout URI",
url: `${originURL}/auth/saml/logout/`,
description: "This url is a http-redirect request. Add this to your logout URI.",
},
];
const onSubmit = async (formData: SAMLConfigFormValues) => {
const payload: Partial<SAMLConfigFormValues> = { ...formData };
await updateInstanceConfigurations(payload)
.then((response = []) => {
setToast({
type: TOAST_TYPE.SUCCESS,
title: "Success",
message: "SAML Configuration Settings updated successfully",
});
reset({
SAML_ENTITY_ID: response.find((item) => item.key === "SAML_ENTITY_ID")?.value,
SAML_SSO_URL: response.find((item) => item.key === "SAML_SSO_URL")?.value,
SAML_LOGOUT_URL: response.find((item) => item.key === "SAML_LOGOUT_URL")?.value,
SAML_CERTIFICATE: response.find((item) => item.key === "SAML_CERTIFICATE")?.value,
SAML_PROVIDER_NAME: response.find((item) => item.key === "SAML_PROVIDER_NAME")?.value,
});
})
.catch((err) => console.error(err));
};
const handleGoBack = (e: React.MouseEvent<HTMLAnchorElement, MouseEvent>) => {
if (isDirty) {
e.preventDefault();
setIsDiscardChangesModalOpen(true);
}
};
return (
<>
<ConfirmDiscardModal
isOpen={isDiscardChangesModalOpen}
onDiscardHref="/authentication"
handleClose={() => setIsDiscardChangesModalOpen(false)}
/>
<div className="flex flex-col gap-8">
<div className="grid grid-cols-2 gap-x-12 gap-y-8 w-full">
<div className="flex flex-col gap-y-4 col-span-2 md:col-span-1">
<div className="pt-2 text-xl font-medium">Configuration</div>
{SAML_FORM_FIELDS.map((field) => (
<ControllerInput
key={field.key}
control={control}
type={field.type}
name={field.key}
label={field.label}
description={field.description}
placeholder={field.placeholder}
error={field.error}
required={field.required}
/>
))}
<div className="flex flex-col gap-1">
<h4 className="text-sm">Certificate</h4>
<Controller
control={control}
name="SAML_CERTIFICATE"
rules={{ required: "Certificate is required." }}
render={({ field: { value, onChange } }) => (
<TextArea
id="SAML_CERTIFICATE"
name="SAML_CERTIFICATE"
value={value}
onChange={onChange}
hasError={Boolean(errors.SAML_CERTIFICATE)}
placeholder="---BEGIN CERTIFICATE---\n2yWn1gc7DhOFB9\nr0gbE+\n---END CERTIFICATE---"
className="min-h-[102px] w-full rounded-md font-medium text-sm"
/>
)}
/>
<p className="text-xs text-custom-text-400">
Certificate used by your IdP for digital signature verification in SAML transactions.
</p>
</div>
<div className="flex flex-col gap-1 pt-4">
<div className="flex items-center gap-4">
<Button variant="primary" onClick={handleSubmit(onSubmit)} loading={isSubmitting} disabled={!isDirty}>
{isSubmitting ? "Saving..." : "Save changes"}
</Button>
<Link
href="/authentication"
className={cn(getButtonStyling("link-neutral", "md"), "font-medium")}
onClick={handleGoBack}
>
Go back
</Link>
</div>
</div>
</div>
<div className="col-span-2 md:col-span-1">
<div className="flex flex-col gap-y-4 px-6 py-4 my-2 bg-custom-background-80/60 rounded-lg">
<div className="pt-2 text-xl font-medium">Service provider details</div>
{SAML_SERVICE_DETAILS.map((field) => (
<CopyField key={field.key} label={field.label} url={field.url} description={field.description} />
))}
<div className="flex flex-col gap-1">
<h4 className="text-sm text-custom-text-200 font-medium">Name ID format</h4>
<p className="text-sm text-custom-text-100">emailAddress</p>
</div>
<div className="flex flex-col gap-1">
<h4 className="text-sm text-custom-text-200 font-medium">Attribute mapping</h4>
<ul className="text-sm text-custom-text-100 list-disc pl-6">
<li>first_name to user.firstName</li>
<li>last_name to user.lastName</li>
<li>email to user.email</li>
</ul>
</div>
</div>
</div>
</div>
</div>
</>
);
};


@@ -0,0 +1,102 @@
"use client";
import { useState } from "react";
import { observer } from "mobx-react-lite";
import Image from "next/image";
import useSWR from "swr";
// ui
import { Loader, ToggleSwitch, setPromiseToast } from "@plane/ui";
// components
import { PageHeader } from "@/components/core";
import { AuthenticationMethodCard } from "../components";
import { InstanceSAMLConfigForm } from "./form";
// hooks
import { useInstance } from "@/hooks/store";
// icons
import SAMLLogo from "/public/logos/saml-logo.svg";
const InstanceSAMLAuthenticationPage = observer(() => {
// store
const { fetchInstanceConfigurations, formattedConfig, updateInstanceConfigurations } = useInstance();
// state
const [isSubmitting, setIsSubmitting] = useState<boolean>(false);
// config
const enableSAMLConfig = formattedConfig?.IS_SAML_ENABLED ?? "";
useSWR("INSTANCE_CONFIGURATIONS", () => fetchInstanceConfigurations());
const updateConfig = async (key: "IS_SAML_ENABLED", value: string) => {
setIsSubmitting(true);
const payload = {
[key]: value,
};
const updateConfigPromise = updateInstanceConfigurations(payload);
setPromiseToast(updateConfigPromise, {
loading: "Saving Configuration...",
success: {
title: "Configuration saved",
message: () => `SAML authentication is now ${value === "1" ? "active" : "disabled"}.`,
},
error: {
title: "Error",
message: () => "Failed to save configuration",
},
});
await updateConfigPromise
.then(() => {
setIsSubmitting(false);
})
.catch((err) => {
console.error(err);
setIsSubmitting(false);
});
};
return (
<>
<PageHeader title="Authentication - God Mode" />
<div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
<div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
<AuthenticationMethodCard
name="SAML"
description="Authenticate your users via Security Assertion Markup Language
protocol."
icon={<Image src={SAMLLogo} height={26} width={26} alt="SAML Logo" className="pb-1 pl-0.5" />}
config={
<ToggleSwitch
value={Boolean(parseInt(enableSAMLConfig))}
onChange={() => updateConfig("IS_SAML_ENABLED", Boolean(parseInt(enableSAMLConfig)) ? "0" : "1")}
size="sm"
disabled={isSubmitting || !formattedConfig}
/>
}
disabled={isSubmitting || !formattedConfig}
withBorder={false}
/>
</div>
<div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md p-4">
{formattedConfig ? (
<InstanceSAMLConfigForm config={formattedConfig} />
) : (
<Loader className="space-y-8">
<Loader.Item height="50px" width="25%" />
<Loader.Item height="50px" />
<Loader.Item height="50px" />
<Loader.Item height="50px" />
<Loader.Item height="50px" width="50%" />
</Loader>
)}
</div>
</div>
</>
);
});
export default InstanceSAMLAuthenticationPage;

Binary file not shown (added; size: 20 KiB).


@@ -0,0 +1,16 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 16 16" xmlns="http://www.w3.org/2000/svg" fill="none">
<g fill="#C22E33">
<path d="M7.754 2l.463.41c.343.304.687.607 1.026.915C11.44 5.32 13.3 7.565 14.7 10.149c.072.132.137.268.202.403l.098.203-.108.057-.081-.115-.21-.299-.147-.214c-1.019-1.479-2.04-2.96-3.442-4.145a6.563 6.563 0 00-1.393-.904c-1.014-.485-1.916-.291-2.69.505-.736.757-1.118 1.697-1.463 2.653-.045.123-.092.245-.139.367l-.082.215-.172-.055c.1-.348.192-.698.284-1.049.21-.795.42-1.59.712-2.356.31-.816.702-1.603 1.093-2.39.169-.341.338-.682.5-1.025h.092z"/>
<path d="M8.448 11.822c-1.626.77-5.56 1.564-7.426 1.36C.717 11.576 3.71 4.05 5.18 2.91l-.095.218a4.638 4.638 0 01-.138.303l-.066.129c-.76 1.462-1.519 2.926-1.908 4.53a7.482 7.482 0 00-.228 1.689c-.01 1.34.824 2.252 2.217 2.309.67.027 1.347-.043 2.023-.114.294-.03.587-.061.88-.084.108-.008.214-.021.352-.039l.231-.028z"/>
<path d="M3.825 14.781c-.445.034-.89.068-1.333.108 4.097.39 8.03-.277 11.91-1.644-1.265-2.23-2.97-3.991-4.952-5.522.026.098.084.169.141.239l.048.06c.17.226.348.448.527.67.409.509.818 1.018 1.126 1.578.778 1.42.356 2.648-1.168 3.296-1.002.427-2.097.718-3.18.892-1.03.164-2.075.243-3.119.323z"/>
</g>
</svg>

(SVG added; size: 1.3 KiB)


@@ -20,7 +20,8 @@ RUN apk --no-cache add \
"make~=4.3" \
"postgresql13-dev~=13" \
"libc-dev" \
"linux-headers"
"linux-headers" \
"xmlsec-dev"
WORKDIR /code


@@ -0,0 +1,18 @@
#!/bin/bash
set -e
export SKIP_ENV_VAR=0
python manage.py wait_for_db
# Wait for migrations
python manage.py wait_for_migrations
# Clear Cache before starting to remove stale values
python manage.py clear_cache
# Register instance if INSTANCE_ADMIN_EMAIL is set
if [ -n "$INSTANCE_ADMIN_EMAIL" ]; then
python manage.py setup_instance "$INSTANCE_ADMIN_EMAIL"
fi
exec gunicorn -w "$GUNICORN_WORKERS" -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:"${PORT:-8000}" --max-requests 1200 --max-requests-jitter 1000 --access-logfile -


@@ -0,0 +1,39 @@
#!/bin/bash
set -e
python manage.py wait_for_db
# Wait for migrations
python manage.py wait_for_migrations
# Collect system information
HOSTNAME=$(hostname)
MAC_ADDRESS=$(ip link show | awk '/ether/ {print $2}' | head -n 1)
CPU_INFO=$(cat /proc/cpuinfo)
MEMORY_INFO=$(free -h)
DISK_INFO=$(df -h)
# Concatenate information and compute SHA-256 hash
SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256sum | awk '{print $1}')
# Set the machine signature, allowing an override from the environment
MACHINE_SIGNATURE=${MACHINE_SIGNATURE:-$SIGNATURE}
export SKIP_ENV_VAR=1
# License check
python manage.py license_check
# Register instance
python manage.py register_instance "$MACHINE_SIGNATURE"
# Load the configuration variable
python manage.py configure_instance
# Create the default bucket
python manage.py create_bucket
# Clear Cache before starting to remove stale values
python manage.py clear_cache
exec gunicorn -w "$GUNICORN_WORKERS" -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:"${PORT:-8000}" --max-requests 1200 --max-requests-jitter 1000 --access-logfile -

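This entrypoint derives the machine signature by hashing host facts: hostname, the first MAC address, CPU info, and snapshots of free -h and df -h. Since memory and disk output drift over time, the signature is only as stable as those snapshots. A hedged Python rendering of the same hash:

import hashlib
import pathlib
import re
import socket
import subprocess

def machine_signature() -> str:
    """SHA-256 over concatenated host facts, approximating the shell pipeline above."""
    hostname = socket.gethostname()
    links = subprocess.run(["ip", "link", "show"], capture_output=True, text=True).stdout
    match = re.search(r"ether\s+(\S+)", links)  # first MAC address, like the awk '/ether/' filter
    mac = match.group(1) if match else ""
    cpu = pathlib.Path("/proc/cpuinfo").read_text()
    mem = subprocess.run(["free", "-h"], capture_output=True, text=True).stdout
    disk = subprocess.run(["df", "-h"], capture_output=True, text=True).stdout
    return hashlib.sha256((hostname + mac + cpu + mem + disk).encode()).hexdigest()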

@@ -32,4 +32,4 @@ python manage.py create_bucket
# Clear Cache before starting to remove stale values
python manage.py clear_cache
exec gunicorn -w "$GUNICORN_WORKERS" -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:"${PORT:-8000}" --max-requests 1200 --max-requests-jitter 1000 --access-logfile -

apiserver/bin/docker-entrypoint-beat.sh (mode changed: Normal file → Executable file)

apiserver/bin/docker-entrypoint-migrator.sh (mode changed: Normal file → Executable file)


@@ -0,0 +1,8 @@
from rest_framework.authentication import SessionAuthentication
class BaseSessionAuthentication(SessionAuthentication):
# Disable CSRF checks for the REST APIs
def enforce_csrf(self, request):
return

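BaseSessionAuthentication keeps DRF's cookie-based session authentication but turns enforce_csrf into a no-op, so REST calls authenticated by a session cookie skip CSRF validation; that protection then has to come from elsewhere, such as same-site cookies or token auth. The class only takes effect once referenced from DRF settings. A hedged sketch of that wiring, with an assumed module path:

REST_FRAMEWORK = {
    "DEFAULT_AUTHENTICATION_CLASSES": [
        # Path is illustrative; point it at wherever BaseSessionAuthentication lives.
        "plane.authentication.session.BaseSessionAuthentication",
    ],
}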

@@ -40,6 +40,9 @@ from .view import (
GlobalViewSerializer,
IssueViewSerializer,
)
from .active_cycle import ActiveCycleSerializer
from .cycle import (
CycleSerializer,
CycleIssueSerializer,
@@ -121,3 +124,13 @@ from .exporter import ExporterHistorySerializer
from .webhook import WebhookSerializer, WebhookLogSerializer
from .dashboard import DashboardSerializer, WidgetSerializer
from .integration import (
IntegrationSerializer,
WorkspaceIntegrationSerializer,
GithubIssueSyncSerializer,
GithubRepositorySerializer,
GithubRepositorySyncSerializer,
GithubCommentSyncSerializer,
SlackProjectSyncSerializer,
)


@@ -0,0 +1,58 @@
# Third party imports
from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from .project import ProjectLiteSerializer
from plane.db.models import (
Cycle,
)
class ActiveCycleSerializer(BaseSerializer):
# favorite
is_favorite = serializers.BooleanField(read_only=True)
total_issues = serializers.IntegerField(read_only=True)
# state group wise distribution
cancelled_issues = serializers.IntegerField(read_only=True)
completed_issues = serializers.IntegerField(read_only=True)
started_issues = serializers.IntegerField(read_only=True)
unstarted_issues = serializers.IntegerField(read_only=True)
backlog_issues = serializers.IntegerField(read_only=True)
# active | draft | upcoming | completed
status = serializers.CharField(read_only=True)
# project details
project_detail = ProjectLiteSerializer(read_only=True, source="project")
class Meta:
model = Cycle
fields = [
# necessary fields
"id",
"workspace_id",
"project_id",
# model fields
"name",
"description",
"start_date",
"end_date",
"owned_by_id",
"view_props",
"sort_order",
"external_source",
"external_id",
"progress_snapshot",
# meta fields
"is_favorite",
"total_issues",
"cancelled_issues",
"completed_issues",
"started_issues",
"unstarted_issues",
"backlog_issues",
"status",
"project_detail",
]
read_only_fields = fields

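Every field on ActiveCycleSerializer is read-only, so the counts and status must already exist as annotations on the queryset; the serializer only surfaces them. A hedged sketch of the kind of annotation the view is expected to supply (related names and filters are assumptions inferred from usage elsewhere in this commit):

from django.db.models import Count, Q

from plane.db.models import Cycle

slug = "acme"  # example workspace slug
cycles = Cycle.objects.filter(workspace__slug=slug).annotate(
    total_issues=Count("issue_cycle", distinct=True),
    completed_issues=Count(
        "issue_cycle",
        filter=Q(issue_cycle__issue__completed_at__isnull=False),
        distinct=True,
    ),
)
# ActiveCycleSerializer(cycles, many=True) becomes valid once every declared
# field (started_issues, status, is_favorite, ...) is annotated the same way.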

@@ -3,6 +3,7 @@ from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from .issue import IssueStateSerializer
from plane.db.models import (
Cycle,


@@ -0,0 +1,8 @@
from .base import IntegrationSerializer, WorkspaceIntegrationSerializer
from .github import (
GithubRepositorySerializer,
GithubRepositorySyncSerializer,
GithubIssueSyncSerializer,
GithubCommentSyncSerializer,
)
from .slack import SlackProjectSyncSerializer


@@ -0,0 +1,22 @@
# Module imports
from plane.app.serializers import BaseSerializer
from plane.db.models import Integration, WorkspaceIntegration
class IntegrationSerializer(BaseSerializer):
class Meta:
model = Integration
fields = "__all__"
read_only_fields = [
"verified",
]
class WorkspaceIntegrationSerializer(BaseSerializer):
integration_detail = IntegrationSerializer(
read_only=True, source="integration"
)
class Meta:
model = WorkspaceIntegration
fields = "__all__"


@@ -0,0 +1,45 @@
# Module imports
from plane.app.serializers import BaseSerializer
from plane.db.models import (
GithubIssueSync,
GithubRepository,
GithubRepositorySync,
GithubCommentSync,
)
class GithubRepositorySerializer(BaseSerializer):
class Meta:
model = GithubRepository
fields = "__all__"
class GithubRepositorySyncSerializer(BaseSerializer):
repo_detail = GithubRepositorySerializer(source="repository")
class Meta:
model = GithubRepositorySync
fields = "__all__"
class GithubIssueSyncSerializer(BaseSerializer):
class Meta:
model = GithubIssueSync
fields = "__all__"
read_only_fields = [
"project",
"workspace",
"repository_sync",
]
class GithubCommentSyncSerializer(BaseSerializer):
class Meta:
model = GithubCommentSync
fields = "__all__"
read_only_fields = [
"project",
"workspace",
"repository_sync",
"issue_sync",
]


@@ -0,0 +1,14 @@
# Module imports
from plane.app.serializers import BaseSerializer
from plane.db.models import SlackProjectSync
class SlackProjectSyncSerializer(BaseSerializer):
class Meta:
model = SlackProjectSync
fields = "__all__"
read_only_fields = [
"project",
"workspace",
"workspace_integration",
]


@@ -18,6 +18,11 @@ from .views import urlpatterns as view_urls
from .webhook import urlpatterns as webhook_urls
from .workspace import urlpatterns as workspace_urls
from .importer import urlpatterns as importer_urls
from .integration import urlpatterns as integration_urls
from .active_cycle import urlpatterns as active_cycle_urls
urlpatterns = [
*analytic_urls,
*asset_urls,
@@ -38,4 +43,8 @@ urlpatterns = [
*workspace_urls,
*api_urls,
*webhook_urls,
# ee
*active_cycle_urls,
*integration_urls,
*importer_urls,
]


@@ -0,0 +1,13 @@
from django.urls import path
from plane.app.views import (
ActiveCycleEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/active-cycles/",
ActiveCycleEndpoint.as_view(),
name="workspace-active-cycle",
),
]

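Once registered, the endpoint resolves at workspaces/<slug>/active-cycles/ under whatever prefix the app's urlconf mounts. A hedged request sketch; the /api/ prefix and the session cookie name are assumptions about the deployment:

import requests

BASE_URL = "https://plane.example.com"  # assumption: your instance's base URL
WORKSPACE_SLUG = "acme"

response = requests.get(
    f"{BASE_URL}/api/workspaces/{WORKSPACE_SLUG}/active-cycles/",
    cookies={"session_id": "<session-cookie>"},  # WorkspaceUserPermission requires an authenticated member
)
response.raise_for_status()
print(response.json())  # shape follows the ActiveCycleSerializer fields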

@@ -0,0 +1,43 @@
from django.urls import path
from plane.app.views import (
ServiceIssueImportSummaryEndpoint,
ImportServiceEndpoint,
UpdateServiceImportStatusEndpoint,
BulkImportIssuesEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/importers/<str:service>/",
ServiceIssueImportSummaryEndpoint.as_view(),
name="importer-summary",
),
path(
"workspaces/<str:slug>/projects/importers/<str:service>/",
ImportServiceEndpoint.as_view(),
name="importer",
),
path(
"workspaces/<str:slug>/importers/",
ImportServiceEndpoint.as_view(),
name="importer",
),
path(
"workspaces/<str:slug>/importers/<str:service>/<uuid:pk>/",
ImportServiceEndpoint.as_view(),
name="importer",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/service/<str:service>/importers/<uuid:importer_id>/",
UpdateServiceImportStatusEndpoint.as_view(),
name="importer-status",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-issues/<str:service>/",
BulkImportIssuesEndpoint.as_view(),
name="bulk-import-issues",
),
]


@@ -0,0 +1,150 @@
from django.urls import path
from plane.app.views import (
IntegrationViewSet,
WorkspaceIntegrationViewSet,
GithubRepositoriesEndpoint,
GithubRepositorySyncViewSet,
GithubIssueSyncViewSet,
GithubCommentSyncViewSet,
BulkCreateGithubIssueSyncEndpoint,
SlackProjectSyncViewSet,
)
urlpatterns = [
path(
"integrations/",
IntegrationViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="integrations",
),
path(
"integrations/<uuid:pk>/",
IntegrationViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="integrations",
),
path(
"workspaces/<str:slug>/workspace-integrations/",
WorkspaceIntegrationViewSet.as_view(
{
"get": "list",
}
),
name="workspace-integrations",
),
path(
"workspaces/<str:slug>/workspace-integrations/<str:provider>/",
WorkspaceIntegrationViewSet.as_view(
{
"post": "create",
}
),
name="workspace-integrations",
),
path(
"workspaces/<str:slug>/workspace-integrations/<uuid:pk>/provider/",
WorkspaceIntegrationViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
name="workspace-integrations",
),
# Github Integrations
path(
"workspaces/<str:slug>/workspace-integrations/<uuid:workspace_integration_id>/github-repositories/",
GithubRepositoriesEndpoint.as_view(),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/",
GithubRepositorySyncViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/<uuid:pk>/",
GithubRepositorySyncViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/",
GithubIssueSyncViewSet.as_view(
{
"post": "create",
"get": "list",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/bulk-create-github-issue-sync/",
BulkCreateGithubIssueSyncEndpoint.as_view(),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:pk>/",
GithubIssueSyncViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/",
GithubCommentSyncViewSet.as_view(
{
"post": "create",
"get": "list",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/<uuid:pk>/",
GithubCommentSyncViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
),
## End Github Integrations
# Slack Integration
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/",
SlackProjectSyncViewSet.as_view(
{
"post": "create",
"get": "list",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/<uuid:pk>/",
SlackProjectSyncViewSet.as_view(
{
"delete": "destroy",
"get": "retrieve",
}
),
),
## End Slack Integration
]


@@ -4,6 +4,7 @@ from django.urls import path
from plane.app.views import (
GlobalSearchEndpoint,
IssueSearchEndpoint,
SearchEndpoint,
)
@@ -18,4 +19,9 @@ urlpatterns = [
IssueSearchEndpoint.as_view(),
name="project-issue-search",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/search/",
SearchEndpoint.as_view(),
name="search",
),
]

View File

@@ -94,6 +94,7 @@ from .cycle.base import (
CycleViewSet,
TransferCycleIssueEndpoint,
)
from .cycle.active_cycle import ActiveCycleEndpoint
from .cycle.issue import (
CycleIssueViewSet,
)
@@ -180,7 +181,7 @@ from .page.base import (
PagesDescriptionViewSet,
)
from .search import GlobalSearchEndpoint, IssueSearchEndpoint
from .search import GlobalSearchEndpoint, IssueSearchEndpoint, SearchEndpoint
from .external.base import (
@@ -221,6 +222,28 @@ from .dashboard.base import DashboardEndpoint, WidgetsEndpoint
from .error_404 import custom_404_view
from .importer.base import (
ServiceIssueImportSummaryEndpoint,
ImportServiceEndpoint,
UpdateServiceImportStatusEndpoint,
BulkImportIssuesEndpoint,
BulkImportModulesEndpoint,
)
from .integration.base import (
IntegrationViewSet,
WorkspaceIntegrationViewSet,
)
from .integration.github import (
GithubRepositoriesEndpoint,
GithubRepositorySyncViewSet,
GithubIssueSyncViewSet,
GithubCommentSyncViewSet,
BulkCreateGithubIssueSyncEndpoint,
)
from .integration.slack import SlackProjectSyncViewSet
from .exporter.base import ExportIssuesEndpoint
from .notification.base import MarkAllReadNotificationViewSet
from .user.base import AccountEndpoint, ProfileEndpoint, UserSessionEndpoint

View File

@@ -0,0 +1,259 @@
# Django imports
from django.db.models import (
Case,
CharField,
Count,
Exists,
F,
OuterRef,
Prefetch,
Q,
Value,
When,
)
from django.utils import timezone
# Module imports
from plane.app.permissions import (
WorkspaceUserPermission,
)
from plane.app.serializers import (
ActiveCycleSerializer,
)
from plane.db.models import (
Cycle,
CycleFavorite,
Issue,
Label,
User,
)
from plane.utils.analytics_plot import burndown_plot
from plane.app.views.base import BaseAPIView
class ActiveCycleEndpoint(BaseAPIView):
permission_classes = [
WorkspaceUserPermission,
]
def get_results_controller(self, results, active_cycles=None):
for cycle in results:
assignee_distribution = (
Issue.issue_objects.filter(
issue_cycle__cycle_id=cycle["id"],
project_id=cycle["project_id"],
workspace__slug=self.kwargs.get("slug"),
)
.annotate(display_name=F("assignees__display_name"))
.annotate(assignee_id=F("assignees__id"))
.annotate(avatar=F("assignees__avatar"))
.values("display_name", "assignee_id", "avatar")
.annotate(
total_issues=Count(
"assignee_id",
filter=Q(archived_at__isnull=True, is_draft=False),
),
)
.annotate(
completed_issues=Count(
"assignee_id",
filter=Q(
completed_at__isnull=False,
archived_at__isnull=True,
is_draft=False,
),
)
)
.annotate(
pending_issues=Count(
"assignee_id",
filter=Q(
completed_at__isnull=True,
archived_at__isnull=True,
is_draft=False,
),
)
)
.order_by("display_name")
)
label_distribution = (
Issue.issue_objects.filter(
issue_cycle__cycle_id=cycle["id"],
project_id=cycle["project_id"],
workspace__slug=self.kwargs.get("slug"),
)
.annotate(label_name=F("labels__name"))
.annotate(color=F("labels__color"))
.annotate(label_id=F("labels__id"))
.values("label_name", "color", "label_id")
.annotate(
total_issues=Count(
"label_id",
filter=Q(archived_at__isnull=True, is_draft=False),
)
)
.annotate(
completed_issues=Count(
"label_id",
filter=Q(
completed_at__isnull=False,
archived_at__isnull=True,
is_draft=False,
),
)
)
.annotate(
pending_issues=Count(
"label_id",
filter=Q(
completed_at__isnull=True,
archived_at__isnull=True,
is_draft=False,
),
)
)
.order_by("label_name")
)
cycle["distribution"] = {
"assignees": assignee_distribution,
"labels": label_distribution,
"completion_chart": {},
}
if cycle["start_date"] and cycle["end_date"]:
cycle["distribution"]["completion_chart"] = burndown_plot(
queryset=active_cycles.get(pk=cycle["id"]),
slug=self.kwargs.get("slug"),
project_id=cycle["project_id"],
cycle_id=cycle["id"],
)
return results
def get(self, request, slug):
subquery = CycleFavorite.objects.filter(
user=self.request.user,
cycle_id=OuterRef("pk"),
project_id=self.kwargs.get("project_id"),
workspace__slug=self.kwargs.get("slug"),
)
active_cycles = (
Cycle.objects.filter(
workspace__slug=slug,
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
start_date__lte=timezone.now(),
end_date__gte=timezone.now(),
)
.select_related("project")
.select_related("workspace")
.select_related("owned_by")
.annotate(is_favorite=Exists(subquery))
.annotate(
total_issues=Count(
"issue_cycle",
filter=Q(
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
.annotate(
completed_issues=Count(
"issue_cycle__issue__state__group",
filter=Q(
issue_cycle__issue__state__group="completed",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
.annotate(
cancelled_issues=Count(
"issue_cycle__issue__state__group",
filter=Q(
issue_cycle__issue__state__group="cancelled",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
.annotate(
started_issues=Count(
"issue_cycle__issue__state__group",
filter=Q(
issue_cycle__issue__state__group="started",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
.annotate(
unstarted_issues=Count(
"issue_cycle__issue__state__group",
filter=Q(
issue_cycle__issue__state__group="unstarted",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
.annotate(
backlog_issues=Count(
"issue_cycle__issue__state__group",
filter=Q(
issue_cycle__issue__state__group="backlog",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
.annotate(
status=Case(
When(
Q(start_date__lte=timezone.now())
& Q(end_date__gte=timezone.now()),
then=Value("CURRENT"),
),
When(
start_date__gt=timezone.now(), then=Value("UPCOMING")
),
When(end_date__lt=timezone.now(), then=Value("COMPLETED")),
When(
Q(start_date__isnull=True) & Q(end_date__isnull=True),
then=Value("DRAFT"),
),
default=Value("DRAFT"),
output_field=CharField(),
)
)
.prefetch_related(
Prefetch(
"issue_cycle__issue__assignees",
queryset=User.objects.only(
"avatar", "first_name", "id"
).distinct(),
)
)
.prefetch_related(
Prefetch(
"issue_cycle__issue__labels",
queryset=Label.objects.only(
"name", "color", "id"
).distinct(),
)
)
.order_by("-created_at")
)
return self.paginate(
request=request,
queryset=active_cycles,
on_results=lambda active_cycles: ActiveCycleSerializer(
active_cycles, many=True
).data,
controller=lambda results: self.get_results_controller(
results, active_cycles
),
default_per_page=int(request.GET.get("per_page", 3)),
)
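Note: the controller hook above enriches each page of results in place, attaching per-cycle assignee/label distributions and a burndown chart. A minimal sketch of a client call, assuming a workspace-level route such as workspaces/<slug>/active-cycles/ and the default paginator envelope (neither is shown in this diff):
# Hypothetical client call; the route and the "results" envelope are assumptions.
import requests
resp = requests.get(
"https://plane.example.com/api/workspaces/my-workspace/active-cycles/",
params={"per_page": 3},
headers={"Authorization": "Bearer <api-token>"},  # placeholder credentials
)
for cycle in resp.json().get("results", []):
print(cycle["name"], cycle["distribution"]["completion_chart"])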

View File

@@ -25,6 +25,7 @@ from django.core.serializers.json import DjangoJSONEncoder
# Third party imports
from rest_framework import status
from rest_framework.response import Response
from plane.app.permissions import (
ProjectEntityPermission,
ProjectLitePermission,

View File

@@ -0,0 +1,560 @@
# Python imports
import uuid
# Third party imports
from rest_framework import status
from rest_framework.response import Response
# Django imports
from django.db.models import Max, Q
# Module imports
from plane.app.views import BaseAPIView
from plane.db.models import (
WorkspaceIntegration,
Importer,
APIToken,
Project,
State,
IssueSequence,
Issue,
IssueActivity,
IssueComment,
IssueLink,
IssueLabel,
Workspace,
IssueAssignee,
Module,
ModuleLink,
ModuleIssue,
Label,
)
from plane.app.serializers import (
ImporterSerializer,
IssueFlatSerializer,
ModuleSerializer,
)
from plane.utils.integrations.github import get_github_repo_details
from plane.utils.importers.jira import (
jira_project_issue_summary,
is_allowed_hostname,
)
from plane.bgtasks.importer_task import service_importer
from plane.utils.html_processor import strip_tags
from plane.app.permissions import WorkSpaceAdminPermission
class ServiceIssueImportSummaryEndpoint(BaseAPIView):
def get(self, request, slug, service):
if service == "github":
owner = request.GET.get("owner", False)
repo = request.GET.get("repo", False)
if not owner or not repo:
return Response(
{"error": "Owner and repo are required"},
status=status.HTTP_400_BAD_REQUEST,
)
workspace_integration = WorkspaceIntegration.objects.get(
integration__provider="github", workspace__slug=slug
)
access_tokens_url = workspace_integration.metadata.get(
"access_tokens_url", False
)
if not access_tokens_url:
return Response(
{
"error": "There was an error during the installation of the GitHub app. To resolve this issue, we recommend reinstalling the GitHub app."
},
status=status.HTTP_400_BAD_REQUEST,
)
issue_count, labels, collaborators = get_github_repo_details(
access_tokens_url, owner, repo
)
return Response(
{
"issue_count": issue_count,
"labels": labels,
"collaborators": collaborators,
},
status=status.HTTP_200_OK,
)
if service == "jira":
# Check for all the keys
params = {
"project_key": "Project key is required",
"api_token": "API token is required",
"email": "Email is required",
"cloud_hostname": "Cloud hostname is required",
}
for key, error_message in params.items():
if not request.GET.get(key, False):
return Response(
{"error": error_message},
status=status.HTTP_400_BAD_REQUEST,
)
project_key = request.GET.get("project_key", "")
api_token = request.GET.get("api_token", "")
email = request.GET.get("email", "")
cloud_hostname = request.GET.get("cloud_hostname", "")
response = jira_project_issue_summary(
email, api_token, project_key, cloud_hostname
)
if "error" in response:
return Response(response, status=status.HTTP_400_BAD_REQUEST)
else:
return Response(
response,
status=status.HTTP_200_OK,
)
return Response(
{"error": "Service not supported yet"},
status=status.HTTP_400_BAD_REQUEST,
)
class ImportServiceEndpoint(BaseAPIView):
permission_classes = [
WorkSpaceAdminPermission,
]
def post(self, request, slug, service):
project_id = request.data.get("project_id", False)
if not project_id:
return Response(
{"error": "Project ID is required"},
status=status.HTTP_400_BAD_REQUEST,
)
workspace = Workspace.objects.get(slug=slug)
if service == "github":
data = request.data.get("data", False)
metadata = request.data.get("metadata", False)
config = request.data.get("config", False)
if not data or not metadata or not config:
return Response(
{"error": "Data, config and metadata are required"},
status=status.HTTP_400_BAD_REQUEST,
)
api_token = APIToken.objects.filter(
user=request.user, workspace=workspace
).first()
if api_token is None:
api_token = APIToken.objects.create(
user=request.user,
label="Importer",
workspace=workspace,
)
importer = Importer.objects.create(
service=service,
project_id=project_id,
status="queued",
initiated_by=request.user,
data=data,
metadata=metadata,
token=api_token,
config=config,
created_by=request.user,
updated_by=request.user,
)
service_importer.delay(service, importer.id)
serializer = ImporterSerializer(importer)
return Response(serializer.data, status=status.HTTP_201_CREATED)
if service == "jira":
data = request.data.get("data", False)
metadata = request.data.get("metadata", False)
config = request.data.get("config", False)
if not data or not metadata:
return Response(
{"error": "Data, config and metadata are required"},
status=status.HTTP_400_BAD_REQUEST,
)
# Read the hostname only after metadata is known to be present
cloud_hostname = metadata.get("cloud_hostname", False)
if not cloud_hostname:
return Response(
{"error": "Cloud hostname is required"},
status=status.HTTP_400_BAD_REQUEST,
)
if not is_allowed_hostname(cloud_hostname):
return Response(
{"error": "Hostname is not a valid hostname."},
status=status.HTTP_400_BAD_REQUEST,
)
api_token = APIToken.objects.filter(
user=request.user, workspace=workspace
).first()
if api_token is None:
api_token = APIToken.objects.create(
user=request.user,
label="Importer",
workspace=workspace,
)
importer = Importer.objects.create(
service=service,
project_id=project_id,
status="queued",
initiated_by=request.user,
data=data,
metadata=metadata,
token=api_token,
config=config,
created_by=request.user,
updated_by=request.user,
)
service_importer.delay(service, importer.id)
serializer = ImporterSerializer(importer)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(
{"error": "Servivce not supported yet"},
status=status.HTTP_400_BAD_REQUEST,
)
def get(self, request, slug):
imports = (
Importer.objects.filter(workspace__slug=slug)
.order_by("-created_at")
.select_related("initiated_by", "project", "workspace")
)
serializer = ImporterSerializer(imports, many=True)
return Response(serializer.data)
def delete(self, request, slug, service, pk):
importer = Importer.objects.get(
pk=pk, service=service, workspace__slug=slug
)
if importer.imported_data is not None:
# Delete all imported Issues
imported_issues = importer.imported_data.get("issues", [])
Issue.issue_objects.filter(id__in=imported_issues).delete()
# Delete all imported Labels
imported_labels = importer.imported_data.get("labels", [])
Label.objects.filter(id__in=imported_labels).delete()
if importer.service == "jira":
imported_modules = importer.imported_data.get("modules", [])
Module.objects.filter(id__in=imported_modules).delete()
importer.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
def patch(self, request, slug, service, pk):
importer = Importer.objects.get(
pk=pk, service=service, workspace__slug=slug
)
serializer = ImporterSerializer(
importer, data=request.data, partial=True
)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class UpdateServiceImportStatusEndpoint(BaseAPIView):
def post(self, request, slug, project_id, service, importer_id):
importer = Importer.objects.get(
pk=importer_id,
workspace__slug=slug,
project_id=project_id,
service=service,
)
importer.status = request.data.get("status", "processing")
importer.save()
return Response(status=status.HTTP_200_OK)
class BulkImportIssuesEndpoint(BaseAPIView):
def post(self, request, slug, project_id, service):
# Get the project
project = Project.objects.get(pk=project_id, workspace__slug=slug)
# Get the default state
default_state = State.objects.filter(
~Q(name="Triage"), project_id=project_id, default=True
).first()
# If there is no default state, fall back to any other non-Triage state
if default_state is None:
default_state = State.objects.filter(
~Q(name="Triage"), project_id=project_id
).first()
# Get the maximum sequence_id
last_id = IssueSequence.objects.filter(
project_id=project_id
).aggregate(largest=Max("sequence"))["largest"]
last_id = 1 if last_id is None else last_id + 1
# Get the maximum sort order
largest_sort_order = Issue.objects.filter(
project_id=project_id, state=default_state
).aggregate(largest=Max("sort_order"))["largest"]
largest_sort_order = (
65535 if largest_sort_order is None else largest_sort_order + 10000
)
# Get the issues_data
issues_data = request.data.get("issues_data", [])
if not len(issues_data):
return Response(
{"error": "Issue data is required"},
status=status.HTTP_400_BAD_REQUEST,
)
# Issues
bulk_issues = []
for issue_data in issues_data:
bulk_issues.append(
Issue(
project_id=project_id,
workspace_id=project.workspace_id,
state_id=(
issue_data.get("state")
if issue_data.get("state", False)
else default_state.id
),
name=issue_data.get("name", "Issue Created through Bulk"),
description_html=issue_data.get(
"description_html", "<p></p>"
),
description_stripped=(
None
if (
issue_data.get("description_html") == ""
or issue_data.get("description_html") is None
)
else strip_tags(issue_data.get("description_html"))
),
sequence_id=last_id,
sort_order=largest_sort_order,
start_date=issue_data.get("start_date", None),
target_date=issue_data.get("target_date", None),
priority=issue_data.get("priority", "none"),
created_by=request.user,
)
)
largest_sort_order = largest_sort_order + 10000
last_id = last_id + 1
issues = Issue.objects.bulk_create(
bulk_issues,
batch_size=100,
ignore_conflicts=True,
)
# Sequences
_ = IssueSequence.objects.bulk_create(
[
IssueSequence(
issue=issue,
sequence=issue.sequence_id,
project_id=project_id,
workspace_id=project.workspace_id,
)
for issue in issues
],
batch_size=100,
)
# Attach Labels
bulk_issue_labels = []
for issue, issue_data in zip(issues, issues_data):
labels_list = issue_data.get("labels_list", [])
bulk_issue_labels = bulk_issue_labels + [
IssueLabel(
issue=issue,
label_id=label_id,
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
)
for label_id in labels_list
]
_ = IssueLabel.objects.bulk_create(
bulk_issue_labels, batch_size=100, ignore_conflicts=True
)
# Attach Assignees
bulk_issue_assignees = []
for issue, issue_data in zip(issues, issues_data):
assignees_list = issue_data.get("assignees_list", [])
bulk_issue_assignees = bulk_issue_assignees + [
IssueAssignee(
issue=issue,
assignee_id=assignee_id,
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
)
for assignee_id in assignees_list
]
_ = IssueAssignee.objects.bulk_create(
bulk_issue_assignees, batch_size=100, ignore_conflicts=True
)
# Track the issue activities
IssueActivity.objects.bulk_create(
[
IssueActivity(
issue=issue,
actor=request.user,
project_id=project_id,
workspace_id=project.workspace_id,
comment=f"imported the issue from {service}",
verb="created",
created_by=request.user,
)
for issue in issues
],
batch_size=100,
)
# Create Comments
bulk_issue_comments = []
for issue, issue_data in zip(issues, issues_data):
comments_list = issue_data.get("comments_list", [])
bulk_issue_comments = bulk_issue_comments + [
IssueComment(
issue=issue,
comment_html=comment.get("comment_html", "<p></p>"),
actor=request.user,
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
)
for comment in comments_list
]
_ = IssueComment.objects.bulk_create(
bulk_issue_comments, batch_size=100
)
# Attach Links
_ = IssueLink.objects.bulk_create(
[
IssueLink(
issue=issue,
url=issue_data.get("link", {}).get(
"url", "https://github.com"
),
title=issue_data.get("link", {}).get(
"title", "Original Issue"
),
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
)
for issue, issue_data in zip(issues, issues_data)
]
)
return Response(
{"issues": IssueFlatSerializer(issues, many=True).data},
status=status.HTTP_201_CREATED,
)
class BulkImportModulesEndpoint(BaseAPIView):
def post(self, request, slug, project_id, service):
modules_data = request.data.get("modules_data", [])
project = Project.objects.get(pk=project_id, workspace__slug=slug)
modules = Module.objects.bulk_create(
[
Module(
name=module.get("name", uuid.uuid4().hex),
description=module.get("description", ""),
start_date=module.get("start_date", None),
target_date=module.get("target_date", None),
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
)
for module in modules_data
],
batch_size=100,
ignore_conflicts=True,
)
modules = Module.objects.filter(
id__in=[module.id for module in modules]
)
if len(modules) == len(modules_data):
_ = ModuleLink.objects.bulk_create(
[
ModuleLink(
module=module,
url=module_data.get("link", {}).get(
"url", "https://plane.so"
),
title=module_data.get("link", {}).get(
"title", "Original Issue"
),
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
)
for module, module_data in zip(modules, modules_data)
],
batch_size=100,
ignore_conflicts=True,
)
bulk_module_issues = []
for module, module_data in zip(modules, modules_data):
module_issues_list = module_data.get("module_issues_list", [])
bulk_module_issues = bulk_module_issues + [
ModuleIssue(
issue_id=issue,
module=module,
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
)
for issue in module_issues_list
]
_ = ModuleIssue.objects.bulk_create(
bulk_module_issues, batch_size=100, ignore_conflicts=True
)
serializer = ModuleSerializer(modules, many=True)
return Response(
{"modules": serializer.data}, status=status.HTTP_201_CREATED
)
else:
return Response(
{
"message": "Modules created but issues could not be imported"
},
status=status.HTTP_200_OK,
)
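For reference, a sketch of the request body BulkImportIssuesEndpoint expects, inferred from the fields it reads above; all values are illustrative:
# Illustrative POST body for .../bulk-import-issues/<service>/ (placeholder values).
payload = {
"issues_data": [
{
"name": "Imported issue",
"description_html": "<p>Imported from the source tracker</p>",
"priority": "none",
"state": None,  # optional state UUID; falls back to the project's default state
"labels_list": [],  # label UUIDs to attach
"assignees_list": [],  # member UUIDs to assign
"comments_list": [{"comment_html": "<p>Original comment</p>"}],
"link": {"url": "https://github.com/org/repo/issues/1", "title": "Original Issue"},
}
]
}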

View File

@@ -0,0 +1,9 @@
from .base import IntegrationViewSet, WorkspaceIntegrationViewSet
from .github import (
GithubRepositorySyncViewSet,
GithubIssueSyncViewSet,
BulkCreateGithubIssueSyncEndpoint,
GithubCommentSyncViewSet,
GithubRepositoriesEndpoint,
)
from .slack import SlackProjectSyncViewSet

View File

@@ -0,0 +1,181 @@
# Python imports
import uuid
# Django imports
from django.contrib.auth.hashers import make_password
# Third party imports
from rest_framework.response import Response
from rest_framework import status
# Module imports
from plane.app.views import BaseViewSet
from plane.db.models import (
Integration,
WorkspaceIntegration,
Workspace,
User,
WorkspaceMember,
APIToken,
)
from plane.app.serializers import (
IntegrationSerializer,
WorkspaceIntegrationSerializer,
)
from plane.utils.integrations.github import (
get_github_metadata,
delete_github_installation,
)
from plane.app.permissions import WorkSpaceAdminPermission
from plane.utils.integrations.slack import slack_oauth
class IntegrationViewSet(BaseViewSet):
serializer_class = IntegrationSerializer
model = Integration
def create(self, request):
serializer = IntegrationSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def partial_update(self, request, pk):
integration = Integration.objects.get(pk=pk)
if integration.verified:
return Response(
{"error": "Verified integrations cannot be updated"},
status=status.HTTP_400_BAD_REQUEST,
)
serializer = IntegrationSerializer(
integration, data=request.data, partial=True
)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def destroy(self, request, pk):
integration = Integration.objects.get(pk=pk)
if integration.verified:
return Response(
{"error": "Verified integrations cannot be updated"},
status=status.HTTP_400_BAD_REQUEST,
)
integration.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
class WorkspaceIntegrationViewSet(BaseViewSet):
serializer_class = WorkspaceIntegrationSerializer
model = WorkspaceIntegration
permission_classes = [
WorkSpaceAdminPermission,
]
def get_queryset(self):
return (
super()
.get_queryset()
.filter(workspace__slug=self.kwargs.get("slug"))
.select_related("integration")
)
def create(self, request, slug, provider):
workspace = Workspace.objects.get(slug=slug)
integration = Integration.objects.get(provider=provider)
config = {}
if provider == "github":
installation_id = request.data.get("installation_id", None)
if not installation_id:
return Response(
{"error": "Installation ID is required"},
status=status.HTTP_400_BAD_REQUEST,
)
metadata = get_github_metadata(installation_id)
config = {"installation_id": installation_id}
if provider == "slack":
code = request.data.get("code", False)
if not code:
return Response(
{"error": "Code is required"},
status=status.HTTP_400_BAD_REQUEST,
)
slack_response = slack_oauth(code=code)
metadata = slack_response
access_token = metadata.get("access_token", False)
team_id = metadata.get("team", {}).get("id", False)
if not metadata or not access_token or not team_id:
return Response(
{
"error": "Slack could not be installed. Please try again later"
},
status=status.HTTP_400_BAD_REQUEST,
)
config = {"team_id": team_id, "access_token": access_token}
# Create a bot user
bot_user = User.objects.create(
email=f"{uuid.uuid4().hex}@plane.so",
username=uuid.uuid4().hex,
password=make_password(uuid.uuid4().hex),
is_password_autoset=True,
is_bot=True,
first_name=integration.title,
avatar=(
integration.avatar_url
if integration.avatar_url is not None
else ""
),
)
# Create an API Token for the bot user
api_token = APIToken.objects.create(
user=bot_user,
user_type=1, # bot user
workspace=workspace,
)
workspace_integration = WorkspaceIntegration.objects.create(
workspace=workspace,
integration=integration,
actor=bot_user,
api_token=api_token,
metadata=metadata,
config=config,
)
# Add bot user as a member of workspace
_ = WorkspaceMember.objects.create(
workspace=workspace_integration.workspace,
member=bot_user,
role=20,
)
return Response(
WorkspaceIntegrationSerializer(workspace_integration).data,
status=status.HTTP_201_CREATED,
)
def destroy(self, request, slug, pk):
workspace_integration = WorkspaceIntegration.objects.get(
pk=pk, workspace__slug=slug
)
if workspace_integration.integration.provider == "github":
installation_id = workspace_integration.config.get(
"installation_id", False
)
if installation_id:
delete_github_installation(installation_id=installation_id)
workspace_integration.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
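For reference, the create flow above expects a provider-specific body; a sketch of the two install requests against the workspace-integrations route wired earlier in this commit (host and values are placeholders):
# Hypothetical install calls for the two supported providers.
import requests
base = "https://plane.example.com/api/workspaces/my-workspace/workspace-integrations"
# GitHub: pass the App installation id received on the GitHub redirect.
requests.post(f"{base}/github/", json={"installation_id": "12345678"})
# Slack: pass the OAuth code returned by Slack's authorize redirect.
requests.post(f"{base}/slack/", json={"code": "<oauth-code>"})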

View File

@@ -0,0 +1,201 @@
# Third party imports
from rest_framework import status
from rest_framework.response import Response
# Module imports
from plane.app.views import BaseViewSet, BaseAPIView
from plane.db.models import (
GithubIssueSync,
GithubRepositorySync,
GithubRepository,
WorkspaceIntegration,
ProjectMember,
Label,
GithubCommentSync,
Project,
)
from plane.app.serializers import (
GithubIssueSyncSerializer,
GithubRepositorySyncSerializer,
GithubCommentSyncSerializer,
)
from plane.utils.integrations.github import get_github_repos
from plane.app.permissions import (
ProjectBasePermission,
ProjectEntityPermission,
)
class GithubRepositoriesEndpoint(BaseAPIView):
permission_classes = [
ProjectBasePermission,
]
def get(self, request, slug, workspace_integration_id):
page = request.GET.get("page", 1)
workspace_integration = WorkspaceIntegration.objects.get(
workspace__slug=slug, pk=workspace_integration_id
)
if workspace_integration.integration.provider != "github":
return Response(
{"error": "Not a github integration"},
status=status.HTTP_400_BAD_REQUEST,
)
access_tokens_url = workspace_integration.metadata["access_tokens_url"]
repositories_url = (
workspace_integration.metadata["repositories_url"]
+ f"?per_page=100&page={page}"
)
repositories = get_github_repos(access_tokens_url, repositories_url)
return Response(repositories, status=status.HTTP_200_OK)
class GithubRepositorySyncViewSet(BaseViewSet):
permission_classes = [
ProjectBasePermission,
]
serializer_class = GithubRepositorySyncSerializer
model = GithubRepositorySync
def perform_create(self, serializer):
serializer.save(project_id=self.kwargs.get("project_id"))
def get_queryset(self):
return (
super()
.get_queryset()
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
)
def create(self, request, slug, project_id, workspace_integration_id):
name = request.data.get("name", False)
url = request.data.get("url", False)
config = request.data.get("config", {})
repository_id = request.data.get("repository_id", False)
owner = request.data.get("owner", False)
if not name or not url or not repository_id or not owner:
return Response(
{"error": "Name, url, repository_id and owner are required"},
status=status.HTTP_400_BAD_REQUEST,
)
# Get the workspace integration
workspace_integration = WorkspaceIntegration.objects.get(
pk=workspace_integration_id
)
# Delete the old repository object
GithubRepositorySync.objects.filter(
project_id=project_id, workspace__slug=slug
).delete()
GithubRepository.objects.filter(
project_id=project_id, workspace__slug=slug
).delete()
# Create repository
repo = GithubRepository.objects.create(
name=name,
url=url,
config=config,
repository_id=repository_id,
owner=owner,
project_id=project_id,
)
# Create a Label for github
label = Label.objects.filter(
name="GitHub",
project_id=project_id,
).first()
if label is None:
label = Label.objects.create(
name="GitHub",
project_id=project_id,
description="Label to sync Plane issues with GitHub issues",
color="#003773",
)
# Create repo sync
repo_sync = GithubRepositorySync.objects.create(
repository=repo,
workspace_integration=workspace_integration,
actor=workspace_integration.actor,
credentials=request.data.get("credentials", {}),
project_id=project_id,
label=label,
)
# Add bot as a member in the project
_ = ProjectMember.objects.get_or_create(
member=workspace_integration.actor, role=20, project_id=project_id
)
# Return Response
return Response(
GithubRepositorySyncSerializer(repo_sync).data,
status=status.HTTP_201_CREATED,
)
class GithubIssueSyncViewSet(BaseViewSet):
permission_classes = [
ProjectEntityPermission,
]
serializer_class = GithubIssueSyncSerializer
model = GithubIssueSync
def perform_create(self, serializer):
serializer.save(
project_id=self.kwargs.get("project_id"),
repository_sync_id=self.kwargs.get("repo_sync_id"),
)
class BulkCreateGithubIssueSyncEndpoint(BaseAPIView):
def post(self, request, slug, project_id, repo_sync_id):
project = Project.objects.get(pk=project_id, workspace__slug=slug)
github_issue_syncs = request.data.get("github_issue_syncs", [])
github_issue_syncs = GithubIssueSync.objects.bulk_create(
[
GithubIssueSync(
issue_id=github_issue_sync.get("issue"),
repo_issue_id=github_issue_sync.get("repo_issue_id"),
issue_url=github_issue_sync.get("issue_url"),
github_issue_id=github_issue_sync.get("github_issue_id"),
repository_sync_id=repo_sync_id,
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
updated_by=request.user,
)
for github_issue_sync in github_issue_syncs
],
batch_size=100,
ignore_conflicts=True,
)
serializer = GithubIssueSyncSerializer(github_issue_syncs, many=True)
return Response(serializer.data, status=status.HTTP_201_CREATED)
class GithubCommentSyncViewSet(BaseViewSet):
permission_classes = [
ProjectEntityPermission,
]
serializer_class = GithubCommentSyncSerializer
model = GithubCommentSync
def perform_create(self, serializer):
serializer.save(
project_id=self.kwargs.get("project_id"),
issue_sync_id=self.kwargs.get("issue_sync_id"),
)

View File

@@ -0,0 +1,95 @@
# Django imports
from django.db import IntegrityError
# Third party imports
from rest_framework import status
from rest_framework.response import Response
from sentry_sdk import capture_exception
# Module imports
from plane.app.views import BaseViewSet
from plane.db.models import (
SlackProjectSync,
WorkspaceIntegration,
ProjectMember,
)
from plane.app.serializers import SlackProjectSyncSerializer
from plane.app.permissions import (
ProjectBasePermission,
)
from plane.utils.integrations.slack import slack_oauth
class SlackProjectSyncViewSet(BaseViewSet):
permission_classes = [
ProjectBasePermission,
]
serializer_class = SlackProjectSyncSerializer
model = SlackProjectSync
def get_queryset(self):
return (
super()
.get_queryset()
.filter(
workspace__slug=self.kwargs.get("slug"),
project_id=self.kwargs.get("project_id"),
)
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
)
def create(self, request, slug, project_id, workspace_integration_id):
try:
code = request.data.get("code", False)
if not code:
return Response(
{"error": "Code is required"},
status=status.HTTP_400_BAD_REQUEST,
)
slack_response = slack_oauth(code=code)
workspace_integration = WorkspaceIntegration.objects.get(
workspace__slug=slug, pk=workspace_integration_id
)
slack_project_sync = SlackProjectSync.objects.create(
access_token=slack_response.get("access_token"),
scopes=slack_response.get("scope"),
bot_user_id=slack_response.get("bot_user_id"),
webhook_url=slack_response.get("incoming_webhook", {}).get(
"url"
),
data=slack_response,
team_id=slack_response.get("team", {}).get("id"),
team_name=slack_response.get("team", {}).get("name"),
workspace_integration=workspace_integration,
project_id=project_id,
)
_ = ProjectMember.objects.get_or_create(
member=workspace_integration.actor,
role=20,
project_id=project_id,
)
serializer = SlackProjectSyncSerializer(slack_project_sync)
return Response(serializer.data, status=status.HTTP_200_OK)
except IntegrityError as e:
if "already exists" in str(e):
return Response(
{"error": "Slack is already installed for the project"},
status=status.HTTP_410_GONE,
)
capture_exception(e)
return Response(
{
"error": "Slack could not be installed. Please try again later"
},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@@ -18,6 +18,7 @@ from plane.db.models import (
Module,
Page,
IssueView,
ProjectMember,
)
from plane.utils.issue_search import search_issues
@@ -249,7 +250,7 @@ class IssueSearchEndpoint(BaseAPIView):
workspace__slug=slug,
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True
project__archived_at__isnull=True,
)
if workspace_search == "false":
@@ -301,3 +302,201 @@ class IssueSearchEndpoint(BaseAPIView):
),
status=status.HTTP_200_OK,
)
class SearchEndpoint(BaseAPIView):
def get(self, request, slug, project_id):
query = request.query_params.get("query", False)
query_type = request.query_params.get("query_type", "issue")
count = int(request.query_params.get("count", 5))
if query_type == "mention":
fields = ["member__first_name", "member__last_name"]
q = Q()
if query:
for field in fields:
q |= Q(**{f"{field}__icontains": query})
users = (
ProjectMember.objects.filter(
q,
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
project_id=project_id,
workspace__slug=slug,
)
.order_by("-created_at")
.values(
"member__first_name",
"member__last_name",
"member__avatar",
"member__display_name",
"member__id",
)[:count]
)
fields = ["name"]
q = Q()
if query:
for field in fields:
q |= Q(**{f"{field}__icontains": query})
pages = (
Page.objects.filter(
q,
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
workspace__slug=slug,
access=0,
)
.order_by("-created_at")
.values("name", "id")[:count]
)
return Response(
{"users": users, "pages": pages}, status=status.HTTP_200_OK
)
if query_type == "project":
fields = ["name", "identifier"]
q = Q()
if query:
for field in fields:
q |= Q(**{f"{field}__icontains": query})
projects = (
Project.objects.filter(
q,
Q(project_projectmember__member=self.request.user)
| Q(network=2),
workspace__slug=slug,
)
.order_by("-created_at")
.distinct()
.values("name", "id", "identifier", "workspace__slug")[:count]
)
return Response(projects, status=status.HTTP_200_OK)
if query_type == "issue":
fields = ["name", "sequence_id", "project__identifier"]
q = Q()
if query:
for field in fields:
if field == "sequence_id":
# Match whole integers only (exclude decimal numbers)
sequences = re.findall(r"\b\d+\b", query)
for sequence_id in sequences:
q |= Q(**{"sequence_id": sequence_id})
else:
q |= Q(**{f"{field}__icontains": query})
issues = (
Issue.issue_objects.filter(
q,
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
workspace__slug=slug,
project_id=project_id,
)
.order_by("-created_at")
.distinct()
.values(
"name",
"id",
"sequence_id",
"project__identifier",
"project_id",
"priority",
"state_id",
)[:count]
)
return Response(issues, status=status.HTTP_200_OK)
if query_type == "cycle":
fields = ["name"]
q = Q()
if query:
for field in fields:
q |= Q(**{f"{field}__icontains": query})
cycles = (
Cycle.objects.filter(
q,
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
workspace__slug=slug,
)
.order_by("-created_at")
.distinct()
.values(
"name",
"id",
"project_id",
"project__identifier",
"workspace__slug",
)[:count]
)
return Response(cycles, status=status.HTTP_200_OK)
if query_type == "module":
fields = ["name"]
q = Q()
if query:
for field in fields:
q |= Q(**{f"{field}__icontains": query})
modules = (
Module.objects.filter(
q,
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
workspace__slug=slug,
)
.order_by("-created_at")
.distinct()
.values(
"name",
"id",
"project_id",
"project__identifier",
"workspace__slug",
)[:count]
)
return Response(modules, status=status.HTTP_200_OK)
if query_type == "page":
fields = ["name"]
q = Q()
if query:
for field in fields:
q |= Q(**{f"{field}__icontains": query})
pages = (
Page.objects.filter(
q,
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
project_id=project_id,
workspace__slug=slug,
access=0,
)
.order_by("-created_at")
.distinct()
.values(
"name",
"id",
"project_id",
"project__identifier",
"workspace__slug",
)[:count]
)
return Response(pages, status=status.HTTP_200_OK)
return Response(
{"error": "Please provide a valid query"},
status=status.HTTP_400_BAD_REQUEST,
)
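The endpoint dispatches on query_type (mention, project, issue, cycle, module or page) and caps each result set via count. A minimal sketch of a call against the project-scoped route added in the urls diff above (host and IDs are placeholders):
# Hypothetical client call against the new search route.
import requests
resp = requests.get(
"https://plane.example.com/api/workspaces/my-workspace/projects/<project-uuid>/search/",
params={"query": "login bug", "query_type": "issue", "count": 5},
headers={"Authorization": "Bearer <api-token>"},  # placeholder credentials
)
print(resp.json())  # matching issues with name, sequence_id, priority, state_id, ...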

View File

@@ -1,6 +1,6 @@
# Python imports
from datetime import datetime
import uuid
import jwt
# Django imports
@@ -22,7 +22,7 @@ from plane.app.serializers import (
WorkSpaceMemberSerializer,
)
from plane.app.views.base import BaseAPIView
from plane.bgtasks.event_tracking_task import workspace_invite_event
from plane.bgtasks.event_tracking_task import track_event
from plane.bgtasks.workspace_invitation_task import workspace_invitation
from plane.db.models import (
User,
@@ -227,13 +227,20 @@ class WorkspaceJoinEndpoint(BaseAPIView):
workspace_invite.delete()
# Send event
workspace_invite_event.delay(
user=user.id if user is not None else None,
track_event.delay(
email=email,
user_agent=request.META.get("HTTP_USER_AGENT"),
ip=request.META.get("REMOTE_ADDR"),
event_name="MEMBER_ACCEPTED",
accepted_from="EMAIL",
properties={
"event_id": uuid.uuid4().hex,
"user": {"email": email, "id": str(user)},
"device_ctx": {
"ip": request.META.get("REMOTE_ADDR", None),
"user_agent": request.META.get(
"HTTP_USER_AGENT", None
),
},
"accepted_from": "EMAIL",
},
)
return Response(

View File

@@ -37,6 +37,12 @@ AUTHENTICATION_ERROR_CODES = {
"GITHUB_NOT_CONFIGURED": 5110,
"GOOGLE_OAUTH_PROVIDER_ERROR": 5115,
"GITHUB_OAUTH_PROVIDER_ERROR": 5120,
# OIDC
"OIDC_NOT_CONFIGURED": 5190,
"OIDC_PROVIDER_ERROR": 5195,
# SAML
"SAML_NOT_CONFIGURED": 5190,
"SAML_PROVIDER_ERROR": 5195,
# Reset Password
"INVALID_PASSWORD_TOKEN": 5125,
"EXPIRED_PASSWORD_TOKEN": 5130,

View File

@@ -0,0 +1,209 @@
# Python imports
import os
# Django imports
from django.conf import settings
# Third party imports
from onelogin.saml2.auth import OneLogin_Saml2_Auth
# Module imports
from plane.license.utils.instance_value import get_configuration_value
from .base import Adapter
from plane.authentication.adapter.error import (
AuthenticationException,
AUTHENTICATION_ERROR_CODES,
)
class SAMLAdapter(Adapter):
provider = "saml"
auth = None
saml_config = {}
def __init__(
self,
request,
):
(
SAML_ENTITY_ID,
SAML_SSO_URL,
SAML_LOGOUT_URL,
SAML_CERTIFICATE,
) = get_configuration_value(
[
{
"key": "SAML_ENTITY_ID",
"default": os.environ.get("SAML_ENTITY_ID"),
},
{
"key": "SAML_SSO_URL",
"default": os.environ.get("SAML_SSO_URL"),
},
{
"key": "SAML_LOGOUT_URL",
"default": os.environ.get("SAML_LOGOUT_URL"),
},
{
"key": "SAML_CERTIFICATE",
"default": os.environ.get("SAML_CERTIFICATE"),
},
]
)
if not (SAML_ENTITY_ID and SAML_SSO_URL and SAML_CERTIFICATE):
raise AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES["SAML_NOT_CONFIGURED"],
error_message="SAML_NOT_CONFIGURED",
)
super().__init__(request, self.provider)
req = self.prepare_saml_request(self.request)
saml_config = self.generate_saml_configuration(
request=request,
entity_id=SAML_ENTITY_ID,
sso_url=SAML_SSO_URL,
logout_url=SAML_LOGOUT_URL,
idp_certificate=SAML_CERTIFICATE,
)
# Generate configuration
self.saml_config = saml_config
auth = OneLogin_Saml2_Auth(
req,
saml_config,
)
self.auth = auth
def generate_saml_configuration(
self,
request,
entity_id,
sso_url,
logout_url,
idp_certificate,
):
return {
"strict": True,
"debug": settings.DEBUG,
"sp": {
"entityId": f"{request.scheme}://{request.get_host()}/auth/saml/metadata/",
"assertionConsumerService": {
"url": f"{request.scheme}://{request.get_host()}/auth/saml/callback/",
"binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST",
},
},
"idp": {
"entityId": entity_id,
"singleSignOnService": {
"url": sso_url,
"binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect",
},
"singleLogoutService": {
"url": logout_url,
"binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect",
},
"x509cert": idp_certificate,
},
"attributeConsumingService": {
"serviceName": "Plane SAML",
"serviceDescription": "Plane SAML",
"requestedAttributes": [
{
"name": "first_name",
"friendlyName": "user.firstName",
"isRequired": False,
"nameFormat": "urn:oasis:names:tc:SAML:2.0:attrname-format:basic",
},
{
"name": "last_name",
"friendlyName": "user.lastName",
"isRequired": False,
"nameFormat": "urn:oasis:names:tc:SAML:2.0:attrname-format:basic",
},
{
"name": "email",
"friendlyName": "user.email",
"isRequired": True,
"nameFormat": "urn:oasis:names:tc:SAML:2.0:attrname-format:basic",
},
],
},
}
def prepare_saml_request(self, request):
return {
"https": "on" if request.is_secure() else "off",
"http_host": request.get_host(),
"script_name": request.path,
"get_data": request.GET.copy(),
"post_data": request.POST.copy(),
}
def get_auth_url(self):
return self.auth.login()
def authenticate(self):
self.auth.process_response()
errors = self.auth.get_errors()
if errors:
if not self.auth.is_authenticated():
raise AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES[
"SAML_PROVIDER_ERROR"
],
error_message="SAML_PROVIDER_ERROR",
)
raise AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES[
"SAML_PROVIDER_ERROR"
],
error_message="SAML_PROVIDER_ERROR",
)
attributes = self.auth.get_attributes()
email = (
attributes.get("email")[0]
if attributes.get("email") and len(attributes.get("email"))
else None
)
if not email:
raise AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES[
"SAML_PROVIDER_ERROR"
],
error_message="SAML_PROVIDER_ERROR",
)
first_name = (
attributes.get("first_name")[0]
if attributes.get("first_name")
and len(attributes.get("first_name"))
else ""
)
last_name = (
attributes.get("last_name")[0]
if attributes.get("last_name") and len(attributes.get("last_name"))
else ""
)
super().set_user_data(
{
"email": email,
"user": {
"first_name": first_name,
"last_name": last_name,
"email": email,
},
}
)
return self.complete_login_or_signup()
def logout(self):
try:
return self.auth.logout()
except Exception:
return False
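The adapter resolves its settings through get_configuration_value with environment-variable fallbacks; only SAML_ENTITY_ID, SAML_SSO_URL and SAML_CERTIFICATE are mandatory. A sketch of the variables involved, with placeholder values:
# Example environment configuration for the SAML adapter (illustrative values).
SAML_ENTITY_ID="https://idp.example.com/metadata"
SAML_SSO_URL="https://idp.example.com/sso"
SAML_LOGOUT_URL="https://idp.example.com/slo"  # optional single-logout URL
SAML_CERTIFICATE="<base64-encoded IdP x509 certificate>"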

View File

@@ -0,0 +1,158 @@
# Python imports
import os
from datetime import datetime
from urllib.parse import urlencode
import pytz
# Module imports
from plane.authentication.adapter.oauth import OauthAdapter
from plane.license.utils.instance_value import get_configuration_value
from plane.authentication.adapter.error import (
AuthenticationException,
AUTHENTICATION_ERROR_CODES,
)
from plane.db.models import Account
class OIDCOAuthProvider(OauthAdapter):
provider = "oidc"
scope = "openid email profile"
def __init__(self, request, code=None, state=None):
(
OIDC_CLIENT_ID,
OIDC_CLIENT_SECRET,
OIDC_TOKEN_URL,
OIDC_USERINFO_URL,
OIDC_AUTHORIZE_URL,
) = get_configuration_value(
[
{
"key": "OIDC_CLIENT_ID",
"default": os.environ.get("OIDC_CLIENT_ID"),
},
{
"key": "OIDC_CLIENT_SECRET",
"default": os.environ.get("OIDC_CLIENT_SECRET"),
},
{
"key": "OIDC_TOKEN_URL",
"default": os.environ.get("OIDC_TOKEN_URL"),
},
{
"key": "OIDC_USERINFO_URL",
"default": os.environ.get("OIDC_USERINFO_URL"),
},
{
"key": "OIDC_AUTHORIZE_URL",
"default": os.environ.get("OIDC_AUTHORIZE_URL"),
},
]
)
if not (
OIDC_CLIENT_ID
and OIDC_CLIENT_SECRET
and OIDC_TOKEN_URL
and OIDC_USERINFO_URL
and OIDC_AUTHORIZE_URL
):
raise AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES["OIDC_NOT_CONFIGURED"],
error_message="OIDC_NOT_CONFIGURED",
)
redirect_uri = (
f"{request.scheme}://{request.get_host()}/auth/oidc/callback/"
)
url_params = {
"client_id": OIDC_CLIENT_ID,
"response_type": "code",
"redirect_uri": redirect_uri,
"state": state,
"scope": self.scope,
}
auth_url = f"{OIDC_AUTHORIZE_URL}?{urlencode(url_params)}"
super().__init__(
request,
self.provider,
OIDC_CLIENT_ID,
self.scope,
redirect_uri,
auth_url,
OIDC_TOKEN_URL,
OIDC_USERINFO_URL,
OIDC_CLIENT_SECRET,
code,
)
def set_token_data(self):
data = {
"code": self.code,
"client_id": self.client_id,
"client_secret": self.client_secret,
"redirect_uri": self.redirect_uri,
"grant_type": "authorization_code",
}
token_response = self.get_user_token(
data=data,
headers={"Content-Type": "application/x-www-form-urlencoded"},
)
super().set_token_data(
{
"access_token": token_response.get("access_token"),
"refresh_token": token_response.get("refresh_token", None),
"access_token_expired_at": (
datetime.fromtimestamp(
token_response.get("expires_in"),
tz=pytz.utc,
)
if token_response.get("expires_in")
else None
),
"refresh_token_expired_at": (
datetime.fromtimestamp(
token_response.get("refresh_token_expired_at"),
tz=pytz.utc,
)
if token_response.get("refresh_token_expired_at")
else None
),
"id_token": token_response.get("id_token", ""),
}
)
def set_user_data(self):
user_info_response = self.get_user_response()
user_data = {
"email": user_info_response.get("email"),
"user": {
"avatar": user_info_response.get("picture"),
"first_name": user_info_response.get("given_name"),
"last_name": user_info_response.get("family_name"),
"provider_id": user_info_response.get("sub"),
"is_password_autoset": True,
},
}
super().set_user_data(user_data)
def logout(self, logout_url=None):
(OIDC_LOGOUT_URL,) = get_configuration_value(
[
{
"key": "OIDC_LOGOUT_URL",
"default": os.environ.get("OIDC_LOGOUT_URL"),
},
]
)
account = Account.objects.filter(
user=self.request.user, provider=self.provider
).first()
id_token = account.id_token if account and account.id_token else None
if OIDC_LOGOUT_URL and id_token and logout_url:
return f"{OIDC_LOGOUT_URL}?id_token_hint={id_token}&post_logout_redirect_uri={logout_url}"
else:
return False
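As with SAML, the provider reads its configuration via get_configuration_value with environment fallbacks; all five core variables must be set, or OIDC_NOT_CONFIGURED is raised. A sketch with placeholder values:
# Example environment configuration for the OIDC provider (illustrative values).
OIDC_CLIENT_ID="plane"
OIDC_CLIENT_SECRET="<client-secret>"
OIDC_AUTHORIZE_URL="https://idp.example.com/oauth2/authorize"
OIDC_TOKEN_URL="https://idp.example.com/oauth2/token"
OIDC_USERINFO_URL="https://idp.example.com/oauth2/userinfo"
OIDC_LOGOUT_URL="https://idp.example.com/oauth2/logout"  # optional; enables RP-initiated logout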

View File

@@ -20,6 +20,15 @@ from .views import (
SignUpAuthEndpoint,
ForgotPasswordSpaceEndpoint,
ResetPasswordSpaceEndpoint,
# OIDC
OIDCAuthInitiateEndpoint,
OIDCallbackEndpoint,
OIDCLogoutEndpoint,
# SAML
SAMLAuthInitiateEndpoint,
SAMLCallbackEndpoint,
SAMLMetadataEndpoint,
SAMLLogoutEndpoint,
# Space
EmailCheckSpaceEndpoint,
GitHubCallbackSpaceEndpoint,
@@ -193,4 +202,41 @@ urlpatterns = [
SetUserPasswordEndpoint.as_view(),
name="set-password",
),
# OIDC
path(
"oidc/",
OIDCAuthInitiateEndpoint.as_view(),
name="oidc",
),
path(
"oidc/callback/",
OIDCallbackEndpoint.as_view(),
name="oidc",
),
path(
"oidc/logout/",
OIDCLogoutEndpoint.as_view(),
name="oidc",
),
# SAML
path(
"saml/",
SAMLAuthInitiateEndpoint.as_view(),
name="saml",
),
path(
"saml/callback/",
SAMLCallbackEndpoint.as_view(),
name="saml",
),
path(
"saml/metadata/",
SAMLMetadataEndpoint.as_view(),
name="saml",
),
path(
"saml/logout/",
SAMLLogoutEndpoint.as_view(),
name="saml",
),
]

View File

@@ -1,4 +1,9 @@
# Python imports
import uuid
# Module imports
from .workspace_project_join import process_workspace_project_invitations
from plane.bgtasks.event_tracking_task import track_event
def post_user_auth_workflow(
@@ -6,4 +11,28 @@ def post_user_auth_workflow(
is_signup,
request,
):
# Process workspace project invitations
process_workspace_project_invitations(user=user)
# track events
event_mapper = {
"email": "Email",
"google": "GOOGLE",
"magic-code": "Magic link",
"github": "GITHUB",
}
track_event.delay(
email=user.email,
event_name="Sign up" if is_signup else "Sign in",
properties={
"event_id": uuid.uuid4().hex,
"user": {"email": user.email, "id": str(user.id)},
"device_ctx": {
"ip": request.META.get("REMOTE_ADDR", None),
"user_agent": request.META.get("HTTP_USER_AGENT", None),
},
"medium": event_mapper.get(user.last_login_medium, "Email"),
"first_time": is_signup,
},
)

View File

@@ -24,6 +24,19 @@ from .app.magic import (
MagicSignUpEndpoint,
)
from .app.oidc import (
OIDCAuthInitiateEndpoint,
OIDCallbackEndpoint,
OIDCLogoutEndpoint,
)
from .app.saml import (
SAMLAuthInitiateEndpoint,
SAMLCallbackEndpoint,
SAMLMetadataEndpoint,
SAMLLogoutEndpoint,
)
from .app.signout import SignOutAuthEndpoint

View File

@@ -0,0 +1,104 @@
# Python imports
import uuid
from urllib.parse import urlencode, urljoin
# Django imports
from django.http import HttpResponseRedirect
from django.views import View
from django.contrib.auth import logout
# Module imports
from plane.authentication.provider.oauth.oidc import OIDCOAuthProvider
from plane.authentication.utils.workspace_project_join import (
process_workspace_project_invitations,
)
from plane.authentication.utils.redirection_path import get_redirection_path
from plane.authentication.utils.login import user_login
from plane.license.models import Instance
from plane.authentication.adapter.error import (
AuthenticationException,
AUTHENTICATION_ERROR_CODES,
)
from plane.authentication.utils.host import base_host
class OIDCAuthInitiateEndpoint(View):
def get(self, request):
request.session["host"] = base_host(request=request, is_app=True)
next_path = request.GET.get("next_path")
if next_path:
request.session["next_path"] = str(next_path)
try:
# Check instance configuration
instance = Instance.objects.first()
if instance is None or not instance.is_setup_done:
raise AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES[
"INSTANCE_NOT_CONFIGURED"
],
error_message="INSTANCE_NOT_CONFIGURED",
)
state = uuid.uuid4().hex
provider = OIDCOAuthProvider(request=request, state=state)
request.session["state"] = state
auth_url = provider.get_auth_url()
return HttpResponseRedirect(auth_url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
url = urljoin(
base_host(request=request, is_app=True),
"?" + urlencode(params),
)
return HttpResponseRedirect(url)
class OIDCallbackEndpoint(View):
def get(self, request):
code = request.GET.get("code")
state = request.GET.get("state")
host = request.session.get("host")
try:
if state != request.session.get("state", ""):
raise AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES[
"OIDC_PROVIDER_ERROR"
],
error_message="OIDC_PROVIDER_ERROR",
)
if not code:
raise AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES[
"OIDC_PROVIDER_ERROR"
],
error_message="OIDC_PROVIDER_ERROR",
)
provider = OIDCOAuthProvider(
request=request,
code=code,
)
user = provider.authenticate()
# Log the user in and record their device info
user_login(request=request, user=user)
# Process workspace and project invitations
process_workspace_project_invitations(user=user)
# Get the redirection path
path = get_redirection_path(user=user)
# redirect to referer path
url = urljoin(host, path)
return HttpResponseRedirect(url)
except AuthenticationException as e:
url = urljoin(
host,
"?" + urlencode(e.get_error_dict()),
)
return HttpResponseRedirect(url)
class OIDCLogoutEndpoint(View):
def get(self, request):
logout(request=request)
return HttpResponseRedirect(base_host(request=request, is_app=True))

View File

@@ -0,0 +1,122 @@
# Python imports
from urllib.parse import urlencode, urljoin
# Django imports
from django.http import HttpResponseRedirect, HttpResponse
from django.views import View
from django.views.decorators.csrf import csrf_exempt
from django.utils.decorators import method_decorator
from django.contrib.auth import logout
# Module imports
from plane.authentication.adapter.saml import SAMLAdapter
from plane.authentication.utils.login import user_login
from plane.authentication.utils.workspace_project_join import (
process_workspace_project_invitations,
)
from plane.authentication.utils.redirection_path import get_redirection_path
from plane.license.models import Instance
from plane.authentication.adapter.error import (
AuthenticationException,
AUTHENTICATION_ERROR_CODES,
)
from plane.authentication.utils.host import base_host
class SAMLAuthInitiateEndpoint(View):
def get(self, request):
request.session["host"] = base_host(request=request, is_app=True)
next_path = request.GET.get("next_path")
if next_path:
request.session["next_path"] = str(next_path)
try:
# Check instance configuration
instance = Instance.objects.first()
if instance is None or not instance.is_setup_done:
raise AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES[
"INSTANCE_NOT_CONFIGURED"
],
error_message="INSTANCE_NOT_CONFIGURED",
)
# Provider
provider = SAMLAdapter(
request=request,
)
# Get the auth url
return_url = provider.get_auth_url()
return HttpResponseRedirect(return_url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
url = urljoin(
base_host(request=request, is_app=True),
"?" + urlencode(params),
)
return HttpResponseRedirect(url)
@method_decorator(csrf_exempt, name="dispatch")
class SAMLCallbackEndpoint(View):
def post(self, request):
host = request.session.get("host", "/")
try:
provider = SAMLAdapter(request=request)
user = provider.authenticate()
# Log the user in and record their device info
user_login(request=request, user=user)
# Process workspace and project invitations
process_workspace_project_invitations(user=user)
# Get the redirection path
path = get_redirection_path(user=user)
# redirect to referer path
url = urljoin(host, path)
return HttpResponseRedirect(url)
except AuthenticationException as e:
url = urljoin(host, "?" + urlencode(e.get_error_dict()))
return HttpResponseRedirect(url)
@method_decorator(csrf_exempt, name="dispatch")
class SAMLLogoutEndpoint(View):
def get(self, request, *args, **kwargs):
logout(request=request)
return HttpResponseRedirect(base_host(request=request, is_app=True))
@method_decorator(csrf_exempt, name="dispatch")
class SAMLMetadataEndpoint(View):
def get(self, request):
xml_template = f"""<EntityDescriptor xmlns="urn:oasis:names:tc:SAML:2.0:metadata"
entityID="{request.scheme}://{request.get_host()}/auth/saml/metadata/">
<SPSSODescriptor protocolSupportEnumeration="urn:oasis:names:tc:SAML:2.0:protocol">
<AssertionConsumerService Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
Location="{request.scheme}://{request.get_host()}/auth/saml/callback/"
index="1"/>
<SingleLogoutService Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
Location="{request.scheme}://{request.get_host()}/auth/saml/logout/"/>
<NameIDFormat>urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress</NameIDFormat>
<AttributeConsumingService index="1">
<ServiceName xml:lang="en">Plane</ServiceName>
<RequestedAttribute Name="user.firstName"
FriendlyName="first_name"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic"
isRequired="false"/>
<RequestedAttribute Name="user.lastName"
FriendlyName="last_name"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic"
isRequired="false"/>
<RequestedAttribute Name="user.email"
FriendlyName="email"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic"
isRequired="true"/>
</AttributeConsumingService>
</SPSSODescriptor>
</EntityDescriptor>
"""
return HttpResponse(xml_template, content_type="application/xml")

View File

@@ -7,6 +7,8 @@ from django.utils import timezone
# Module imports
from plane.authentication.utils.host import user_ip, base_host
from plane.db.models import User
from plane.authentication.provider.oauth.oidc import OIDCOAuthProvider
from plane.authentication.adapter.saml import SAMLAdapter
class SignOutAuthEndpoint(View):
@@ -18,7 +20,28 @@ class SignOutAuthEndpoint(View):
user.last_logout_ip = user_ip(request=request)
user.last_logout_time = timezone.now()
user.save()
# Log the user out
# Check if the user's last login medium is OIDC
if request.user.last_login_medium == "oidc":
provider = OIDCOAuthProvider(
request=request,
)
logout_url = provider.logout(
logout_url=f"{base_host(request=request, is_app=True)}/auth/oidc/logout/"
)
if logout_url:
return HttpResponseRedirect(logout_url)
# Check if the user's last login medium is SAML
if request.user.last_login_medium == "saml":
provider = SAMLAdapter(
request=request,
)
logout_url = provider.logout()
if logout_url:
return HttpResponseRedirect(logout_url)
# Logout user
logout(request)
return HttpResponseRedirect(
base_host(request=request, is_app=True)

View File

@@ -2,14 +2,69 @@ from django.utils import timezone
from datetime import timedelta
from plane.db.models import APIActivityLog
from celery import shared_task
from django.conf import settings
from pymongo import MongoClient
from pymongo.errors import BulkWriteError
from plane.utils.exception_logger import log_exception
BATCH_SIZE = 3000
@shared_task
def delete_api_logs():
# Get the logs older than 30 days to delete
logs_to_delete = APIActivityLog.objects.filter(
created_at__lte=timezone.now() - timedelta(days=30)
)
# Delete the logs
logs_to_delete._raw_delete(logs_to_delete.db)
if settings.MONGO_DB_URL:
# Get the logs older than 30 days to delete
logs_to_delete = APIActivityLog.objects.filter(
created_at__lte=timezone.now() - timedelta(days=30)
)
# Create a MongoDB client
client = MongoClient(settings.MONGO_DB_URL)
db = client["plane"]
collection = db["api_activity_logs"]
# Function to insert documents in batches
def bulk_insert(docs):
try:
collection.insert_many(docs)
except BulkWriteError as bwe:
log_exception(bwe)
# Prepare the logs for bulk insert
def log_generator():
batch = []
for log in logs_to_delete.iterator():
batch.append(
{
"token_identifier": log.token_identifier,
"path": log.path,
"method": log.method,
"query_params": log.query_params,
"headers": log.headers,
"body": log.body,
"response_body": log.response_body,
"response_code": log.response_code,
"ip_address": log.ip_address,
"user_agent": log.user_agent,
"created_at": log.created_at,
"updated_at": log.updated_at,
"created_by": str(log.created_by_id) if log.created_by_id else None,
"updated_by": str(log.updated_by_id) if log.updated_by_id else None,
}
)
# If batch size is reached, yield the batch
if len(batch) == BATCH_SIZE:
yield batch
batch = []
# Yield the remaining logs
if batch:
yield batch
# Upload the logs to MongoDB in batches
for batch in log_generator():
bulk_insert(batch)
# Delete the logs
logs_to_delete._raw_delete(logs_to_delete.db)
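For reference, a minimal sketch of reading the archived logs back out of MongoDB. The connection string is an assumption (it matches the default compose service name), and the date filter is illustrative:
from datetime import datetime, timedelta
from pymongo import MongoClient

client = MongoClient("mongodb://plane-mongodb:27017/")
collection = client["plane"]["api_activity_logs"]

# Illustrative query: archived 5xx responses from the last 90 days
cutoff = datetime.utcnow() - timedelta(days=90)
for doc in collection.find(
    {"response_code": {"$gte": 500}, "created_at": {"$gte": cutoff}}
):
    print(doc["method"], doc["path"], doc["response_code"])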

View File

@@ -0,0 +1,598 @@
# Python imports
import random
from datetime import datetime
# Django imports
from django.db.models import Max
# Third party imports
from celery import shared_task
from faker import Faker
# Module imports
from plane.db.models import (
Workspace,
WorkspaceMember,
User,
Project,
ProjectMember,
State,
Label,
Cycle,
Module,
Issue,
IssueSequence,
IssueAssignee,
IssueLabel,
IssueActivity,
CycleIssue,
ModuleIssue,
)
def create_workspace_members(workspace, members):
members = User.objects.filter(email__in=members)
_ = WorkspaceMember.objects.bulk_create(
[
WorkspaceMember(
workspace=workspace,
member=member,
role=20,
)
for member in members
],
ignore_conflicts=True,
)
return
def create_project(workspace, user_id):
fake = Faker()
name = fake.name()
project = Project.objects.create(
workspace=workspace,
name=name,
identifier=name[
: random.randint(2, 12 if len(name) - 1 >= 12 else len(name) - 1)
].upper(),
created_by_id=user_id,
)
# Add current member as project member
_ = ProjectMember.objects.create(
project=project,
member_id=user_id,
role=20,
)
return project
def create_project_members(workspace, project, members):
members = User.objects.filter(email__in=members)
_ = ProjectMember.objects.bulk_create(
[
ProjectMember(
project=project,
workspace=workspace,
member=member,
role=20,
sort_order=random.randint(0, 65535),
)
for member in members
],
ignore_conflicts=True,
)
return
def create_states(workspace, project, user_id):
states = [
{
"name": "Backlog",
"color": "#A3A3A3",
"sequence": 15000,
"group": "backlog",
"default": True,
},
{
"name": "Todo",
"color": "#3A3A3A",
"sequence": 25000,
"group": "unstarted",
},
{
"name": "In Progress",
"color": "#F59E0B",
"sequence": 35000,
"group": "started",
},
{
"name": "Done",
"color": "#16A34A",
"sequence": 45000,
"group": "completed",
},
{
"name": "Cancelled",
"color": "#EF4444",
"sequence": 55000,
"group": "cancelled",
},
]
states = State.objects.bulk_create(
[
State(
name=state["name"],
color=state["color"],
project=project,
sequence=state["sequence"],
workspace=workspace,
group=state["group"],
default=state.get("default", False),
created_by_id=user_id,
)
for state in states
]
)
return states
def create_labels(workspace, project, user_id):
fake = Faker()
Faker.seed(0)
return Label.objects.bulk_create(
[
Label(
name=fake.color_name(),
color=fake.hex_color(),
project=project,
workspace=workspace,
created_by_id=user_id,
sort_order=random.randint(0, 65535),
)
for _ in range(0, 50)
],
ignore_conflicts=True,
)
def create_cycles(workspace, project, user_id, cycle_count):
fake = Faker()
Faker.seed(0)
cycles = []
used_date_ranges = set() # Track used date ranges
while len(cycles) < cycle_count:
# Generate a start date, allowing for None
start_date_option = [None, fake.date_this_year()]
start_date = start_date_option[random.randint(0, 1)]
# Initialize end_date based on start_date
end_date = (
None
if start_date is None
else fake.date_between_dates(
date_start=start_date,
date_end=datetime.now().date().replace(month=12, day=31),
)
)
# Ensure end_date is strictly after start_date if start_date is not None
while start_date is not None and (
end_date <= start_date
or (start_date, end_date) in used_date_ranges
):
end_date = fake.date_this_year()
# Add the unique date range to the set
(
used_date_ranges.add((start_date, end_date))
if (end_date is not None and start_date is not None)
else None
)
# Append the cycle with unique date range
cycles.append(
Cycle(
name=fake.name(),
owned_by_id=user_id,
sort_order=random.randint(0, 65535),
start_date=start_date,
end_date=end_date,
project=project,
workspace=workspace,
)
)
return Cycle.objects.bulk_create(cycles, ignore_conflicts=True)
def create_modules(workspace, project, user_id, module_count):
fake = Faker()
Faker.seed(0)
modules = []
for _ in range(0, module_count):
start_date = [None, fake.date_this_year()][random.randint(0, 1)]
end_date = (
None
if start_date is None
else fake.date_between_dates(
date_start=start_date,
date_end=datetime.now().date().replace(month=12, day=31),
)
)
modules.append(
Module(
name=fake.name(),
sort_order=random.randint(0, 65535),
start_date=start_date,
target_date=end_date,
project=project,
workspace=workspace,
)
)
return Module.objects.bulk_create(modules, ignore_conflicts=True)
def create_issues(workspace, project, user_id, issue_count):
fake = Faker()
Faker.seed(0)
# Scope state and creator lookups to the current project
states = State.objects.filter(project=project).values_list("id", flat=True)
creators = ProjectMember.objects.filter(project=project).values_list(
"member_id", flat=True
)
issues = []
# Get the maximum sequence_id
last_id = IssueSequence.objects.filter(
project=project,
).aggregate(
largest=Max("sequence")
)["largest"]
last_id = 1 if last_id is None else last_id + 1
# Get the maximum sort order
largest_sort_order = Issue.objects.filter(
project=project,
state_id=states[random.randint(0, len(states) - 1)],
).aggregate(largest=Max("sort_order"))["largest"]
largest_sort_order = (
65535 if largest_sort_order is None else largest_sort_order + 10000
)
for _ in range(0, issue_count):
start_date = [None, fake.date_this_year()][random.randint(0, 1)]
end_date = (
None
if start_date is None
else fake.date_between_dates(
date_start=start_date,
date_end=datetime.now().date().replace(month=12, day=31),
)
)
sentence = fake.sentence()
issues.append(
Issue(
state_id=states[random.randint(0, len(states) - 1)],
project=project,
workspace=workspace,
name=sentence[:254],
description_html=f"<p>{sentence}</p>",
description_stripped=sentence,
sequence_id=last_id,
sort_order=largest_sort_order,
start_date=start_date,
target_date=end_date,
priority=["urgent", "high", "medium", "low", "none"][
random.randint(0, 4)
],
created_by_id=creators[random.randint(0, len(creators) - 1)],
)
)
largest_sort_order = largest_sort_order + random.randint(0, 1000)
last_id = last_id + 1
issues = Issue.objects.bulk_create(
issues, ignore_conflicts=True, batch_size=1000
)
# Sequences
_ = IssueSequence.objects.bulk_create(
[
IssueSequence(
issue=issue,
sequence=issue.sequence_id,
project=project,
workspace=workspace,
)
for issue in issues
],
batch_size=100,
)
# Track the issue activities
IssueActivity.objects.bulk_create(
[
IssueActivity(
issue=issue,
actor_id=user_id,
project=project,
workspace=workspace,
comment="created the issue",
verb="created",
created_by_id=user_id,
)
for issue in issues
],
batch_size=100,
)
return
def create_issue_parent(workspace, project, user_id, issue_count):
parent_count = issue_count / 4
parent_issues = Issue.objects.filter(project=project).values_list(
"id", flat=True
)[: int(parent_count)]
sub_issues = Issue.objects.filter(project=project).exclude(
pk__in=parent_issues
)[: int(issue_count / 2)]
bulk_sub_issues = []
for sub_issue in sub_issues:
sub_issue.parent_id = parent_issues[
random.randint(0, int(parent_count - 1))
]
bulk_sub_issues.append(sub_issue)
Issue.objects.bulk_update(bulk_sub_issues, ["parent"], batch_size=1000)
def create_issue_assignees(workspace, project, user_id, issue_count):
# assignees
assignees = ProjectMember.objects.filter(project=project).values_list(
"member_id", flat=True
)
issues = random.sample(
list(
Issue.objects.filter(project=project).values_list("id", flat=True)
),
int(issue_count / 2),
)
# Build issue assignees in bulk
bulk_issue_assignees = []
for issue in issues:
for assignee in random.sample(
list(assignees), random.randint(0, len(assignees) - 1)
):
bulk_issue_assignees.append(
IssueAssignee(
issue_id=issue,
assignee_id=assignee,
project=project,
workspace=workspace,
)
)
# Issue assignees
IssueAssignee.objects.bulk_create(
bulk_issue_assignees, batch_size=1000, ignore_conflicts=True
)
def create_issue_labels(workspace, project, user_id, issue_count):
# labels
labels = Label.objects.filter(project=project).values_list("id", flat=True)
issues = random.sample(
list(
Issue.objects.filter(project=project).values_list("id", flat=True)
),
int(issue_count / 2),
)
# Build issue labels in bulk
bulk_issue_labels = []
for issue in issues:
for label in random.sample(
list(labels), random.randint(0, len(labels) - 1)
):
bulk_issue_labels.append(
IssueLabel(
issue_id=issue,
label_id=label,
project=project,
workspace=workspace,
)
)
# Issue labels
IssueLabel.objects.bulk_create(
bulk_issue_labels, batch_size=1000, ignore_conflicts=True
)
def create_cycle_issues(workspace, project, user_id, issue_count):
# cycles
cycles = Cycle.objects.filter(project=project).values_list("id", flat=True)
issues = random.sample(
list(
Issue.objects.filter(project=project).values_list("id", flat=True)
),
int(issue_count / 2),
)
# Build cycle issues in bulk
bulk_cycle_issues = []
for issue in issues:
cycle = cycles[random.randint(0, len(cycles) - 1)]
bulk_cycle_issues.append(
CycleIssue(
cycle_id=cycle,
issue_id=issue,
project=project,
workspace=workspace,
)
)
# Cycle issues
CycleIssue.objects.bulk_create(
bulk_cycle_issues, batch_size=1000, ignore_conflicts=True
)
def create_module_issues(workspace, project, user_id, issue_count):
# modules
modules = Module.objects.filter(project=project).values_list(
"id", flat=True
)
issues = random.sample(
list(
Issue.objects.filter(project=project).values_list("id", flat=True)
),
int(issue_count / 2),
)
# Build module issues in bulk
bulk_module_issues = []
for issue in issues:
module = modules[random.randint(0, len(modules) - 1)]
bulk_module_issues.append(
ModuleIssue(
module_id=module,
issue_id=issue,
project=project,
workspace=workspace,
)
)
# Module issues
ModuleIssue.objects.bulk_create(
bulk_module_issues, batch_size=1000, ignore_conflicts=True
)
@shared_task
def create_fake_data(
slug, email, members, issue_count, cycle_count, module_count
):
workspace = Workspace.objects.get(slug=slug)
user = User.objects.get(email=email)
user_id = user.id
# create workspace members
print("creating workspace members")
create_workspace_members(workspace=workspace, members=members)
print("Done creating workspace members")
# Create a project
print("Creating project")
project = create_project(workspace=workspace, user_id=user_id)
print("Done creating projects")
# create project members
print("Creating project members")
create_project_members(
workspace=workspace, project=project, members=members
)
print("Done creating project members")
# Create states
print("Creating states")
_ = create_states(workspace=workspace, project=project, user_id=user_id)
print("Done creating states")
# Create labels
print("Creating labels")
_ = create_labels(workspace=workspace, project=project, user_id=user_id)
print("Done creating labels")
# create cycles
print("Creating cycles")
_ = create_cycles(
workspace=workspace,
project=project,
user_id=user_id,
cycle_count=cycle_count,
)
print("Done creating cycles")
# create modules
print("Creating modules")
_ = create_modules(
workspace=workspace,
project=project,
user_id=user_id,
module_count=module_count,
)
print("Done creating modules")
print("Creating issues")
create_issues(
workspace=workspace,
project=project,
user_id=user_id,
issue_count=issue_count,
)
print("Done creating issues")
print("Creating parent and sub issues")
create_issue_parent(
workspace=workspace,
project=project,
user_id=user_id,
issue_count=issue_count,
)
print("Done creating parent and sub issues")
print("Creating issue assignees")
create_issue_assignees(
workspace=workspace,
project=project,
user_id=user_id,
issue_count=issue_count,
)
print("Done creating issue assignees")
print("Creating issue labels")
create_issue_labels(
workspace=workspace,
project=project,
user_id=user_id,
issue_count=issue_count,
)
print("Done creating issue labels")
print("Creating cycle issues")
create_cycle_issues(
workspace=workspace,
project=project,
user_id=user_id,
issue_count=issue_count,
)
print("Done creating cycle issues")
print("Creating module issues")
create_module_issues(
workspace=workspace,
project=project,
user_id=user_id,
issue_count=issue_count,
)
print("Done creating module issues")
return
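A minimal sketch of enqueuing this task with Celery, using the signature shown above; all values are illustrative:
create_fake_data.delay(
    slug="acme",
    email="owner@example.com",
    members=["member-1@example.com", "member-2@example.com"],
    issue_count=200,
    cycle_count=5,
    module_count=5,
)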

View File

@@ -30,7 +30,7 @@ def posthogConfiguration():
@shared_task
def auth_events(user, email, user_agent, ip, event_name, medium, first_time):
def track_event(email, event_name, properties):
try:
POSTHOG_API_KEY, POSTHOG_HOST = posthogConfiguration()
@@ -39,43 +39,7 @@ def auth_events(user, email, user_agent, ip, event_name, medium, first_time):
posthog.capture(
email,
event=event_name,
properties={
"event_id": uuid.uuid4().hex,
"user": {"email": email, "id": str(user)},
"device_ctx": {
"ip": ip,
"user_agent": user_agent,
},
"medium": medium,
"first_time": first_time,
},
)
except Exception as e:
log_exception(e)
return
@shared_task
def workspace_invite_event(
user, email, user_agent, ip, event_name, accepted_from
):
try:
POSTHOG_API_KEY, POSTHOG_HOST = posthogConfiguration()
if POSTHOG_API_KEY and POSTHOG_HOST:
posthog = Posthog(POSTHOG_API_KEY, host=POSTHOG_HOST)
posthog.capture(
email,
event=event_name,
properties={
"event_id": uuid.uuid4().hex,
"user": {"email": email, "id": str(user)},
"device_ctx": {
"ip": ip,
"user_agent": user_agent,
},
"accepted_from": accepted_from,
},
properties=properties,
)
except Exception as e:
log_exception(e)

View File

@@ -0,0 +1,212 @@
# Python imports
import json
import requests
import uuid
# Django imports
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
from django.contrib.auth.hashers import make_password
# Third Party imports
from celery import shared_task
from sentry_sdk import capture_exception
# Module imports
from plane.app.serializers import ImporterSerializer
from plane.db.models import (
Importer,
WorkspaceMember,
GithubRepositorySync,
GithubRepository,
ProjectMember,
WorkspaceIntegration,
Label,
User,
IssueProperty,
UserNotificationPreference,
)
from plane.bgtasks.user_welcome_task import send_welcome_slack
@shared_task
def service_importer(service, importer_id):
try:
importer = Importer.objects.get(pk=importer_id)
importer.status = "processing"
importer.save()
users = importer.data.get("users", [])
# Check if we need to import users as well
if len(users):
# For all invited users create the users
new_users = User.objects.bulk_create(
[
User(
email=user.get("email").strip().lower(),
username=uuid.uuid4().hex,
password=make_password(uuid.uuid4().hex),
is_password_autoset=True,
)
for user in users
if user.get("import", False) == "invite"
],
batch_size=100,
ignore_conflicts=True,
)
_ = UserNotificationPreference.objects.bulk_create(
[UserNotificationPreference(user=user) for user in new_users],
batch_size=100,
)
_ = [
send_welcome_slack.delay(
str(user.id),
True,
f"{user.email} was imported to Plane from {service}",
)
for user in new_users
]
workspace_users = User.objects.filter(
email__in=[
user.get("email").strip().lower()
for user in users
if user.get("import", False) == "invite"
or user.get("import", False) == "map"
]
)
# Check if any of the users are already member of workspace
_ = WorkspaceMember.objects.filter(
member__in=[user for user in workspace_users],
workspace_id=importer.workspace_id,
).update(is_active=True)
# Add new users to Workspace and project automatically
WorkspaceMember.objects.bulk_create(
[
WorkspaceMember(
member=user,
workspace_id=importer.workspace_id,
created_by=importer.created_by,
)
for user in workspace_users
],
batch_size=100,
ignore_conflicts=True,
)
ProjectMember.objects.bulk_create(
[
ProjectMember(
project_id=importer.project_id,
workspace_id=importer.workspace_id,
member=user,
created_by=importer.created_by,
)
for user in workspace_users
],
batch_size=100,
ignore_conflicts=True,
)
IssueProperty.objects.bulk_create(
[
IssueProperty(
project_id=importer.project_id,
workspace_id=importer.workspace_id,
user=user,
created_by=importer.created_by,
)
for user in workspace_users
],
batch_size=100,
ignore_conflicts=True,
)
# Check if sync config is on for github importers
if service == "github" and importer.config.get("sync", False):
name = importer.metadata.get("name", False)
url = importer.metadata.get("url", False)
config = importer.metadata.get("config", {})
owner = importer.metadata.get("owner", False)
repository_id = importer.metadata.get("repository_id", False)
workspace_integration = WorkspaceIntegration.objects.get(
workspace_id=importer.workspace_id,
integration__provider="github",
)
# Delete the old repository object
GithubRepositorySync.objects.filter(
project_id=importer.project_id
).delete()
GithubRepository.objects.filter(
project_id=importer.project_id
).delete()
# Create a Label for github
label = Label.objects.filter(
name="GitHub", project_id=importer.project_id
).first()
if label is None:
label = Label.objects.create(
name="GitHub",
project_id=importer.project_id,
description="Label to sync Plane issues with GitHub issues",
color="#003773",
)
# Create repository
repo = GithubRepository.objects.create(
name=name,
url=url,
config=config,
repository_id=repository_id,
owner=owner,
project_id=importer.project_id,
)
# Create repo sync
_ = GithubRepositorySync.objects.create(
repository=repo,
workspace_integration=workspace_integration,
actor=workspace_integration.actor,
credentials=importer.data.get("credentials", {}),
project_id=importer.project_id,
label=label,
)
# Add bot as a member in the project
_ = ProjectMember.objects.get_or_create(
member=workspace_integration.actor,
role=20,
project_id=importer.project_id,
)
if settings.PROXY_BASE_URL:
headers = {"Content-Type": "application/json"}
import_data_json = json.dumps(
ImporterSerializer(importer).data,
cls=DjangoJSONEncoder,
)
_ = requests.post(
f"{settings.PROXY_BASE_URL}/hooks/workspaces/{str(importer.workspace_id)}/projects/{str(importer.project_id)}/importers/{str(service)}/",
json=import_data_json,
headers=headers,
)
return
except Exception as e:
importer = Importer.objects.get(pk=importer_id)
importer.status = "failed"
importer.save()
# Print logs if in DEBUG mode
if settings.DEBUG:
print(e)
capture_exception(e)
return
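A minimal sketch of enqueuing the importer task with Celery; the importer id here is a placeholder primary key:
service_importer.delay("github", importer_id)  # importer_id is a placeholder pk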

View File

@@ -42,8 +42,9 @@ def page_transaction(new_value, old_value, page_id):
new_transactions = []
deleted_transaction_ids = set()
# TODO - Add "issue-embed-component", "img", "todo" components
components = ["mention-component"]
# TODO - Add "img", "todo" components
components = ["mention-component", "issue-embed-component"]
for component in components:
old_mentions = extract_components(old_value, component)
new_mentions = extract_components(new_value, component)
@@ -57,7 +58,7 @@ def page_transaction(new_value, old_value, page_id):
transaction=mention["id"],
page_id=page_id,
entity_identifier=mention["entity_identifier"],
entity_name=mention["entity_name"],
entity_name=mention["entity_name"] if mention["entity_name"] else "issue",
workspace_id=page.workspace_id,
project_id=page.project_id,
created_at=timezone.now(),

View File

@@ -0,0 +1,36 @@
# Django imports
from django.conf import settings
# Third party imports
from celery import shared_task
from sentry_sdk import capture_exception
from slack_sdk import WebClient
from slack_sdk.errors import SlackApiError
# Module imports
from plane.db.models import User
@shared_task
def send_welcome_slack(user_id, created, message):
try:
instance = User.objects.get(pk=user_id)
if created and not instance.is_bot:
# Send message on slack as well
if settings.SLACK_BOT_TOKEN:
client = WebClient(token=settings.SLACK_BOT_TOKEN)
try:
_ = client.chat_postMessage(
channel="#trackers",
text=message,
)
except SlackApiError as e:
print(f"Got an error: {e.response['error']}")
return
except Exception as e:
# Print logs if in DEBUG mode
if settings.DEBUG:
print(e)
capture_exception(e)
return

View File

@@ -3,8 +3,11 @@ import logging
# Third party imports
from celery import shared_task
from slack_sdk import WebClient
from slack_sdk.errors import SlackApiError
# Django imports
from django.conf import settings
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import render_to_string
from django.utils.html import strip_tags
@@ -15,6 +18,18 @@ from plane.license.utils.instance_value import get_email_configuration
from plane.utils.exception_logger import log_exception
def push_updated_to_slack(workspace, workspace_member_invite):
# Send message on slack as well
client = WebClient(token=settings.SLACK_BOT_TOKEN)
try:
_ = client.chat_postMessage(
channel="#trackers",
text=f"{workspace_member_invite.email} has been invited to {workspace.name} as a {workspace_member_invite.role}",
)
except SlackApiError as e:
print(f"Got an error: {e.response['error']}")
@shared_task
def workspace_invitation(email, workspace_id, token, current_site, invitor):
try:
@@ -80,6 +95,10 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor):
msg.send()
logging.getLogger("plane").info("Email sent successfully")
# Send message on slack as well
if settings.SLACK_BOT_TOKEN:
push_updated_to_slack(workspace, workspace_member_invite)
return
except (Workspace.DoesNotExist, WorkspaceMemberInvite.DoesNotExist) as e:
log_exception(e)

View File

@@ -0,0 +1,79 @@
# Django imports
from typing import Any
from django.core.management.base import BaseCommand, CommandError
# Module imports
from plane.db.models import User, Workspace, WorkspaceMember
class Command(BaseCommand):
help = "Create dump issues, cycles etc. for a project in a given workspace"
def handle(self, *args: Any, **options: Any) -> str | None:
try:
workspace_name = input("Workspace Name: ")
workspace_slug = input("Workspace slug: ")
if workspace_slug == "":
raise CommandError("Workspace slug is required")
if Workspace.objects.filter(slug=workspace_slug).exists():
raise CommandError("Workspace already exists")
creator = input("Your email: ")
if (
creator == ""
or not User.objects.filter(email=creator).exists()
):
raise CommandError(
"User email is required and should be existing in Database"
)
user = User.objects.get(email=creator)
members = input("Enter Member emails (comma separated): ")
members = members.split(",") if members != "" else []
issue_count = int(
input("Number of issues to be created: ")
)
cycle_count = int(
input("Number of cycles to be created: ")
)
module_count = int(
input("Number of modules to be created: ")
)
# Create workspace
workspace = Workspace.objects.create(
slug=workspace_slug,
name=workspace_name,
owner=user,
)
# Create workspace member
WorkspaceMember.objects.create(
workspace=workspace, role=20, member=user
)
from plane.bgtasks.create_faker import create_fake_data
create_fake_data.delay(
slug=workspace_slug,
email=creator,
members=members,
issue_count=issue_count,
cycle_count=cycle_count,
module_count=module_count,
)
self.stdout.write(
self.style.SUCCESS("Data is pushed to the queue")
)
return
except Exception as e:
self.stdout.write(
self.style.ERROR(f"Command errored out {str(e)}")
)
return

View File

@@ -0,0 +1,68 @@
# Python imports
import os
import requests
from requests.exceptions import RequestException
# Django imports
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
help = "Check the license of the instance with Prime Server"
def handle(self, *args, **options):
try:
# Verify the license key
prime_host = os.environ.get("PRIME_HOST", False)
machine_signature = os.environ.get("MACHINE_SIGNATURE", False)
license_key = os.environ.get("LICENSE_KEY", False)
deploy_platform = os.environ.get("DEPLOY_PLATFORM", False)
domain = os.environ.get("LICENSE_DOMAIN", False)
# If any of the above is not provided raise a command error
if not prime_host or not machine_signature or not license_key:
raise CommandError("Invalid license key provided")
# Check with the license server
response = requests.post(
f"{prime_host}/api/validate/",
headers={
"Content-Type": "application/json",
"X-Api-Key": str(license_key),
},
json={
"machine_signature": str(machine_signature),
},
)
# Check if status code is 204
if response.status_code == 204:
self.stdout.write(
self.style.SUCCESS("License key verified successfully")
)
return
elif response.status_code == 400:
if deploy_platform == "KUBERNETES":
response = requests.post(
f"{prime_host}/api/kubernetes-setup/",
headers={
"Content-Type": "application/json",
"X-Api-Key": str(license_key),
},
json={
"machine_signature": str(machine_signature),
"domain": domain,
},
)
self.stdout.write(
self.style.SUCCESS("Instance created successfully")
)
return
else:
raise CommandError("Instance does not exist")
else:
raise CommandError("Invalid license key provided")
except RequestException:
raise CommandError("Could not verify the license key")

View File

@@ -15,9 +15,15 @@ from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.utils import timezone
from django.conf import settings
# Third party imports
from sentry_sdk import capture_exception
from slack_sdk import WebClient
from slack_sdk.errors import SlackApiError
# Module imports
from ..mixins import TimeAuditModel
from plane.db.mixins import TimeAuditModel
def get_default_onboarding():
@@ -215,3 +221,23 @@ def create_user_notification(sender, instance, created, **kwargs):
mention=False,
issue_completed=False,
)
@receiver(post_save, sender=User)
def send_welcome_slack(sender, instance, created, **kwargs):
try:
if created and not instance.is_bot:
# Send message on slack as well
if settings.SLACK_BOT_TOKEN:
client = WebClient(token=settings.SLACK_BOT_TOKEN)
try:
_ = client.chat_postMessage(
channel="#trackers",
text=f"New user {instance.email} has signed up and begun the onboarding journey.",
)
except SlackApiError as e:
print(f"Got an error: {e.response['error']}")
return
except Exception as e:
capture_exception(e)
return

View File

@@ -0,0 +1,23 @@
from pymongo import MongoClient
def singleton(cls):
instances = {}
def wrapper(*args, **kwargs):
if cls not in instances:
instances[cls] = cls(*args, **kwargs)
return instances[cls]
return wrapper
@singleton
class Database:
db = None
client = None
def __init__(self, mongo_uri, database_name):
self.client = MongoClient(mongo_uri)
self.db = self.client[database_name]
def get_db(self):
return self.db
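A short usage sketch of the singleton: repeated constructions return the same instance, so the MongoClient is created once per process (URI and database name are illustrative). Note that the decorator keys on the class, so a later call with a different URI still returns the first instance:
db_a = Database("mongodb://plane-mongodb:27017/", "plane")
db_b = Database("mongodb://plane-mongodb:27017/", "plane")
assert db_a is db_b  # same instance, one MongoClient per process
collection = db_a.get_db()["api_activity_logs"]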

View File

@@ -53,6 +53,10 @@ class InstanceEndpoint(BaseAPIView):
(
IS_GOOGLE_ENABLED,
IS_GITHUB_ENABLED,
IS_OIDC_ENABLED,
OIDC_PROVIDER_NAME,
IS_SAML_ENABLED,
SAML_PROVIDER_NAME,
GITHUB_APP_NAME,
EMAIL_HOST,
ENABLE_MAGIC_LINK_LOGIN,
@@ -72,6 +76,22 @@ class InstanceEndpoint(BaseAPIView):
"key": "IS_GITHUB_ENABLED",
"default": os.environ.get("IS_GITHUB_ENABLED", "0"),
},
{
"key": "IS_OIDC_ENABLED",
"default": os.environ.get("IS_OIDC_ENABLED", "0"),
},
{
"key": "OIDC_PROVIDER_NAME",
"default": os.environ.get("OIDC_PROVIDER_NAME", ""),
},
{
"key": "IS_SAML_ENABLED",
"default": os.environ.get("IS_SAML_ENABLED", "0"),
},
{
"key": "SAML_PROVIDER_NAME",
"default": os.environ.get("SAML_PROVIDER_NAME", ""),
},
{
"key": "GITHUB_APP_NAME",
"default": os.environ.get("GITHUB_APP_NAME", ""),
@@ -117,6 +137,10 @@ class InstanceEndpoint(BaseAPIView):
data["is_github_enabled"] = IS_GITHUB_ENABLED == "1"
data["is_magic_login_enabled"] = ENABLE_MAGIC_LINK_LOGIN == "1"
data["is_email_password_enabled"] = ENABLE_EMAIL_PASSWORD == "1"
data["is_oidc_enabled"] = IS_OIDC_ENABLED == "1"
data["oidc_provider_name"] = OIDC_PROVIDER_NAME
data["is_saml_enabled"] = IS_SAML_ENABLED == "1"
data["saml_provider_name"] = SAML_PROVIDER_NAME
# Github app name
data["github_app_name"] = str(GITHUB_APP_NAME)

View File

@@ -119,6 +119,92 @@ class Command(BaseCommand):
"category": "UNSPLASH",
"is_encrypted": True,
},
## OIDC
{
"key": "OIDC_CLIENT_ID",
"value": "",
"category": "AUTHENTICATION",
"is_encrypted": False,
},
{
"key": "OIDC_CLIENT_SECRET",
"value": "",
"category": "AUTHENTICATION",
"is_encrypted": True,
},
{
"key": "OIDC_TOKEN_URL",
"value": "",
"category": "AUTHENTICATION",
"is_encrypted": False,
},
{
"key": "OIDC_USERINFO_URL",
"value": "",
"category": "AUTHENTICATION",
"is_encrypted": False,
},
{
"key": "OIDC_AUTHORIZE_URL",
"value": "",
"category": "AUTHENTICATION",
"is_encrypted": False,
},
{
"key": "IS_OIDC_ENABLED",
"value": "0",
"category": "AUTHENTICATION",
"is_encrypted": False,
},
{
"key": "OIDC_LOGOUT_URL",
"value": "",
"category": "AUTHENTICATION",
"is_encrypted": False,
},
{
"key": "OIDC_PROVIDER_NAME",
"value": "",
"category": "AUTHENTICATION",
"is_encrypted": False,
},
## SAML
{
"key": "SAML_ENTITY_ID",
"value": "",
"category": "AUTHENTICATION",
"is_encrypted": False,
},
{
"key": "SAML_SSO_URL",
"value": "",
"category": "AUTHENTICATION",
"is_encrypted": False,
},
{
"key": "SAML_CERTIFICATE",
"value": "",
"category": "AUTHENTICATION",
"is_encrypted": True,
},
{
"key": "SAML_LOGOUT_URL",
"value": "",
"category": "AUTHENTICATION",
"is_encrypted": False,
},
{
"key": "IS_SAML_ENABLED",
"value": "0",
"category": "AUTHENTICATION",
"is_encrypted": False,
},
{
"key": "SAML_PROVIDER_NAME",
"value": "",
"category": "AUTHENTICATION",
"is_encrypted": False,
},
]
for item in config_keys:

View File

@@ -0,0 +1,79 @@
# Python imports
import json
import secrets
import uuid
# Django imports
from django.contrib.auth.hashers import make_password
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
# Module imports
from plane.license.models import Instance, InstanceAdmin
from plane.db.models import User, Profile
class Command(BaseCommand):
help = "Check if instance in registered else register"
def add_arguments(self, parser):
# Positional argument
parser.add_argument("admin_email", type=str, help="Admin Email")
def handle(self, *args, **options):
with open("package.json", "r") as file:
# Load JSON content from the file
data = json.load(file)
admin_email = options.get("admin_email", False)
if not admin_email:
raise CommandError("admin email is required")
user_count = User.objects.filter(is_bot=False).count()
user = User.objects.filter(email=admin_email).first()
if user is None:
user = User.objects.create(
email=admin_email,
username=uuid.uuid4().hex,
password=make_password(uuid.uuid4().hex),
)
_ = Profile.objects.create(user=user)
try:
# Check if the instance is registered
instance = Instance.objects.first()
if instance is None:
instance = Instance.objects.create(
instance_name="Plane Enterprise",
instance_id=secrets.token_hex(12),
license_key=None,
api_key=secrets.token_hex(8),
version=data.get("version"),
last_checked_at=timezone.now(),
user_count=user_count,
is_verified=True,
is_setup_done=True,
is_signup_screen_visited=True,
)
# Get or create an instance admin
_, created = InstanceAdmin.objects.get_or_create(
user=user,
instance=instance,
defaults={"role": 20, "is_verified": True},
)
if not created:
self.stdout.write(
self.style.WARNING(
"given email is already an instance admin"
)
)
self.stdout.write(self.style.SUCCESS("Successful"))
except Exception as e:
print(e)
raise CommandError("Failure")

View File

@@ -38,6 +38,5 @@ class APITokenLogMiddleware:
except Exception as e:
print(e)
# If the token does not exist, you can decide whether to log this as an invalid attempt
return None

View File

@@ -295,7 +295,7 @@ if bool(os.environ.get("SENTRY_DSN", False)) and os.environ.get(
# Application Envs
PROXY_BASE_URL = os.environ.get("PROXY_BASE_URL", False) # For External
SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN", False)
FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880))
# Unsplash Access key
@@ -325,6 +325,9 @@ SKIP_ENV_VAR = os.environ.get("SKIP_ENV_VAR", "1") == "1"
DATA_UPLOAD_MAX_MEMORY_SIZE = int(os.environ.get("FILE_SIZE_LIMIT", 5242880))
# MongoDB Settings
MONGO_DB_URL = os.environ.get("MONGO_DB_URL", False)
# Cookie Settings
SESSION_COOKIE_SECURE = secure_origins
SESSION_COOKIE_HTTPONLY = True

View File

@@ -12,6 +12,10 @@ SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
INSTALLED_APPS += ("scout_apm.django",) # noqa
if os.environ.get("IS_MULTI_CLOUD"):
SECURE_SSL_REDIRECT = True
else:
SECURE_SSL_REDIRECT = False
# Scout Settings
SCOUT_MONITOR = os.environ.get("SCOUT_MONITOR", False)

View File

@@ -0,0 +1,117 @@
import requests
import re
from requests.auth import HTTPBasicAuth
from sentry_sdk import capture_exception
from urllib.parse import urlparse, urljoin
def is_allowed_hostname(hostname):
allowed_domains = [
"atl-paas.net",
"atlassian.com",
"atlassian.net",
"jira.com",
]
parsed_uri = urlparse(f"https://{hostname}")
domain = parsed_uri.netloc.split(":")[0] # Ensures no port is included
base_domain = ".".join(domain.split(".")[-2:])
return base_domain in allowed_domains
def is_valid_project_key(project_key):
if project_key:
project_key = project_key.strip().upper()
# Adjust the regular expression as needed based on your specific requirements.
if len(project_key) > 30:
return False
# Check the validity of the key as well
pattern = re.compile(r"^[A-Z0-9]{1,10}$")
return pattern.match(project_key) is not None
else:
return False
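A few illustrative checks of the validator, reflecting the rules above:
assert is_valid_project_key(" plane ")      # stripped and upper-cased to "PLANE"
assert is_valid_project_key("ABC123")
assert not is_valid_project_key("WEB-APP")  # '-' fails the [A-Z0-9]{1,10} pattern
assert not is_valid_project_key("")         # falsy keys are rejected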
def generate_valid_project_key(project_key):
return project_key.strip().upper()
def generate_url(hostname, path):
if not is_allowed_hostname(hostname):
raise ValueError("Invalid or unauthorized hostname")
return urljoin(f"https://{hostname}", path)
def jira_project_issue_summary(email, api_token, project_key, hostname):
try:
if not is_allowed_hostname(hostname):
return {"error": "Invalid or unauthorized hostname"}
if not is_valid_project_key(project_key):
return {"error": "Invalid project key"}
auth = HTTPBasicAuth(email, api_token)
headers = {"Accept": "application/json"}
# make the project key upper case
project_key = generate_valid_project_key(project_key)
# issues
issue_url = generate_url(
hostname,
f"/rest/api/3/search?jql=project={project_key} AND issuetype!=Epic",
)
issue_response = requests.request(
"GET", issue_url, headers=headers, auth=auth
).json()["total"]
# modules
module_url = generate_url(
hostname,
f"/rest/api/3/search?jql=project={project_key} AND issuetype=Epic",
)
module_response = requests.request(
"GET", module_url, headers=headers, auth=auth
).json()["total"]
# status
status_url = generate_url(
hostname, f"/rest/api/3/project/${project_key}/statuses"
)
status_response = requests.request(
"GET", status_url, headers=headers, auth=auth
).json()
# labels
labels_url = generate_url(
hostname, f"/rest/api/3/label/?jql=project={project_key}"
)
labels_response = requests.request(
"GET", labels_url, headers=headers, auth=auth
).json()["total"]
# users
users_url = generate_url(
hostname, f"/rest/api/3/users/search?jql=project={project_key}"
)
users_response = requests.request(
"GET", users_url, headers=headers, auth=auth
).json()
return {
"issues": issue_response,
"modules": module_response,
"labels": labels_response,
"states": len(status_response),
"users": (
[
user
for user in users_response
if user.get("accountType") == "atlassian"
]
),
}
except Exception as e:
capture_exception(e)
return {
"error": "Something went wrong could not fetch information from jira"
}
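An illustrative call; the credentials are placeholders, and the hostname must resolve to one of the allowed Atlassian domains listed above:
summary = jira_project_issue_summary(
    email="me@example.com",
    api_token="jira-api-token",  # placeholder
    project_key="PLN",
    hostname="example.atlassian.net",
)
if "error" not in summary:
    print(summary["issues"], summary["modules"], summary["states"])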

View File

@@ -0,0 +1,154 @@
import os
import jwt
import requests
from urllib.parse import urlparse, parse_qs
from datetime import datetime, timedelta
from cryptography.hazmat.primitives.serialization import load_pem_private_key
from cryptography.hazmat.backends import default_backend
from django.conf import settings
def get_jwt_token():
app_id = os.environ.get("GITHUB_APP_ID", "")
secret = bytes(
os.environ.get("GITHUB_APP_PRIVATE_KEY", ""), encoding="utf8"
)
current_timestamp = int(datetime.now().timestamp())
due_date = datetime.now() + timedelta(minutes=10)
expiry = int(due_date.timestamp())
payload = {
"iss": app_id,
"sub": app_id,
"exp": expiry,
"iat": current_timestamp,
"aud": "https://github.com/login/oauth/access_token",
}
priv_rsakey = load_pem_private_key(secret, None, default_backend())
token = jwt.encode(payload, priv_rsakey, algorithm="RS256")
return token
def get_github_metadata(installation_id):
token = get_jwt_token()
url = f"https://api.github.com/app/installations/{installation_id}"
headers = {
"Authorization": "Bearer " + str(token),
"Accept": "application/vnd.github+json",
}
response = requests.get(url, headers=headers).json()
return response
def get_github_repos(access_tokens_url, repositories_url):
token = get_jwt_token()
headers = {
"Authorization": "Bearer " + str(token),
"Accept": "application/vnd.github+json",
}
oauth_response = requests.post(
access_tokens_url,
headers=headers,
).json()
oauth_token = oauth_response.get("token", "")
headers = {
"Authorization": "Bearer " + str(oauth_token),
"Accept": "application/vnd.github+json",
}
response = requests.get(
repositories_url,
headers=headers,
).json()
return response
def delete_github_installation(installation_id):
token = get_jwt_token()
url = f"https://api.github.com/app/installations/{installation_id}"
headers = {
"Authorization": "Bearer " + str(token),
"Accept": "application/vnd.github+json",
}
response = requests.delete(url, headers=headers)
return response
def get_github_repo_details(access_tokens_url, owner, repo):
token = get_jwt_token()
headers = {
"Authorization": "Bearer " + str(token),
"Accept": "application/vnd.github+json",
"X-GitHub-Api-Version": "2022-11-28",
}
oauth_response = requests.post(
access_tokens_url,
headers=headers,
).json()
oauth_token = oauth_response.get("token")
headers = {
"Authorization": "Bearer " + oauth_token,
"Accept": "application/vnd.github+json",
}
open_issues = requests.get(
f"https://api.github.com/repos/{owner}/{repo}",
headers=headers,
).json()["open_issues_count"]
total_labels = 0
labels_response = requests.get(
f"https://api.github.com/repos/{owner}/{repo}/labels?per_page=100&page=1",
headers=headers,
)
# Check if there are more pages
if len(labels_response.links.keys()):
# get the query parameter of last
last_url = labels_response.links.get("last").get("url")
parsed_url = urlparse(last_url)
last_page_value = parse_qs(parsed_url.query)["page"][0]
total_labels = total_labels + 100 * (int(last_page_value) - 1)
# Get labels in last page
last_page_labels = requests.get(last_url, headers=headers).json()
total_labels = total_labels + len(last_page_labels)
else:
total_labels = len(labels_response.json())
# Currently only supports up to 100 collaborators
# TODO: Update this function to fetch all collaborators
collaborators = requests.get(
f"https://api.github.com/repos/{owner}/{repo}/collaborators?per_page=100&page=1",
headers=headers,
).json()
return open_issues, total_labels, collaborators
def get_release_notes():
token = settings.GITHUB_ACCESS_TOKEN
if token:
headers = {
"Authorization": "Bearer " + str(token),
"Accept": "application/vnd.github.v3+json",
}
else:
headers = {
"Accept": "application/vnd.github.v3+json",
}
url = "https://api.github.com/repos/makeplane/plane/releases?per_page=5&page=1"
response = requests.get(url, headers=headers)
if response.status_code != 200:
return {"error": "Unable to render information from Github Repository"}
return response.json()

View File

@@ -0,0 +1,21 @@
import os
import requests
def slack_oauth(code):
SLACK_OAUTH_URL = os.environ.get("SLACK_OAUTH_URL", False)
SLACK_CLIENT_ID = os.environ.get("SLACK_CLIENT_ID", False)
SLACK_CLIENT_SECRET = os.environ.get("SLACK_CLIENT_SECRET", False)
# Oauth Slack
if SLACK_OAUTH_URL and SLACK_CLIENT_ID and SLACK_CLIENT_SECRET:
response = requests.get(
SLACK_OAUTH_URL,
params={
"code": code,
"client_id": SLACK_CLIENT_ID,
"client_secret": SLACK_CLIENT_SECRET,
},
)
return response.json()
return {}
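An illustrative exchange of the temporary OAuth code; the function returns an empty dict when the Slack environment variables are unset:
tokens = slack_oauth(code="temporary-oauth-code")  # placeholder code
access_token = tokens.get("access_token")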

View File

@@ -9,6 +9,8 @@ psycopg==3.1.18
psycopg-binary==3.1.18
psycopg-c==3.1.18
dj-database-url==2.1.0
# mongo
pymongo==4.6.3
# redis
redis==5.0.4
django-redis==5.4.0
@@ -60,4 +62,6 @@ zxcvbn==4.4.28
# timezone
pytz==2024.1
# jwt
PyJWT==2.8.0
PyJWT==2.8.0
# SAML
python3-saml==1.16.0

View File

@@ -0,0 +1,31 @@
(plane_proxy) {
request_body {
max_size {$FILE_SIZE_LIMIT}
}
reverse_proxy /spaces/* space:3000
reverse_proxy /god-mode/* admin:3000
reverse_proxy /api/* api:8000
reverse_proxy /auth/* api:8000
reverse_proxy /{$BUCKET_NAME}/* plane-minio:9000
reverse_proxy /* web:3000
}
{
email {$CERT_EMAIL:admin@example.com}
acme_ca {$CERT_ACME_CA}
{$CERT_ACME_DNS}
servers {
max_header_size 5MB
client_ip_headers X-Forwarded-For X-Real-IP
}
}
{$SITE_ADDRESS} {
import plane_proxy
}

View File

@@ -0,0 +1,169 @@
x-proxy-env: &proxy-env
environment:
- SITE_ADDRESS=${SITE_ADDRESS:-localhost:80}
- CERT_EMAIL=${CERT_EMAIL:-admin@example.com}
- CERT_ACME_CA=${CERT_ACME_CA:-}
- CERT_ACME_DNS=${CERT_ACME_DNS:-}
- BUCKET_NAME=${BUCKET_NAME:-uploads}
- FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880}
- LISTEN_HTTP_PORT=${LISTEN_HTTP_PORT:-80}
- LISTEN_HTTPS_PORT=${LISTEN_HTTPS_PORT:-443}
x-app-env: &app-env
environment:
- WEB_URL=${WEB_URL:-http://localhost}
- DEBUG=${DEBUG:-0}
- SENTRY_DSN=${SENTRY_DSN:-""}
- SENTRY_ENVIRONMENT=${SENTRY_ENVIRONMENT:-"production"}
- CORS_ALLOWED_ORIGINS=${CORS_ALLOWED_ORIGINS:-}
# Gunicorn Workers
- GUNICORN_WORKERS=${GUNICORN_WORKERS:-2}
#DB SETTINGS
- PGHOST=${PGHOST:-plane-db}
- PGDATABASE=${PGDATABASE:-plane}
- POSTGRES_USER=${POSTGRES_USER:-plane}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-plane}
- POSTGRES_DB=${POSTGRES_DB:-plane}
- PGDATA=${PGDATA:-/var/lib/postgresql/data}
- DATABASE_URL=${DATABASE_URL:-postgresql://plane:plane@plane-db/plane}
# REDIS SETTINGS
- REDIS_HOST=${REDIS_HOST:-plane-redis}
- REDIS_PORT=${REDIS_PORT:-6379}
- REDIS_URL=${REDIS_URL:-redis://plane-redis:6379/}
# Application secret
- SECRET_KEY=${SECRET_KEY:-60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5}
# DATA STORE SETTINGS
- USE_MINIO=${USE_MINIO:-1}
- AWS_REGION=${AWS_REGION:-""}
- AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-"access-key"}
- AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-"secret-key"}
- AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000}
- AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads}
- MINIO_ROOT_USER=${MINIO_ROOT_USER:-"access-key"}
- MINIO_ROOT_PASSWORD=${MINIO_ROOT_PASSWORD:-"secret-key"}
- BUCKET_NAME=${BUCKET_NAME:-uploads}
- FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880}
services:
admin:
<<: *app-env
image: registry.plane.tools/plane/admin-enterprise:${APP_RELEASE_VERSION}
restart: unless-stopped
command: node admin/server.js admin
deploy:
replicas: ${ADMIN_REPLICAS:-1}
depends_on:
- api
- web
web:
<<: *app-env
image: registry.plane.tools/plane/web-enterprise:${APP_RELEASE_VERSION}
restart: unless-stopped
command: node web/server.js web
deploy:
replicas: ${WEB_REPLICAS:-1}
depends_on:
- api
- worker
space:
<<: *app-env
image: registry.plane.tools/plane/space-enterprise:${APP_RELEASE_VERSION}
restart: unless-stopped
command: node space/server.js space
deploy:
replicas: ${SPACE_REPLICAS:-1}
depends_on:
- api
- worker
- web
api:
<<: *app-env
image: registry.plane.tools/plane/backend-enterprise:${APP_RELEASE_VERSION}
restart: unless-stopped
command: ./bin/docker-entrypoint-api-ee.sh
deploy:
replicas: ${API_REPLICAS:-1}
volumes:
- ${INSTALL_DIR}/logs/api:/code/plane/logs
depends_on:
- plane-db
- plane-redis
worker:
<<: *app-env
image: registry.plane.tools/plane/backend-enterprise:${APP_RELEASE_VERSION}
restart: unless-stopped
command: ./bin/docker-entrypoint-worker.sh
volumes:
- ${INSTALL_DIR}/logs/worker:/code/plane/logs
depends_on:
- api
- plane-db
- plane-redis
beat-worker:
<<: *app-env
image: registry.plane.tools/plane/backend-enterprise:${APP_RELEASE_VERSION}
restart: unless-stopped
command: ./bin/docker-entrypoint-beat.sh
volumes:
- ${INSTALL_DIR}/logs/beat-worker:/code/plane/logs
depends_on:
- api
- plane-db
- plane-redis
migrator:
<<: *app-env
image: registry.plane.tools/plane/backend-enterprise:${APP_RELEASE_VERSION}
restart: "no"
command: ./bin/docker-entrypoint-migrator.sh
volumes:
- ${INSTALL_DIR}/logs/migrator:/code/plane/logs
depends_on:
- plane-db
- plane-redis
plane-db:
<<: *app-env
image: registry.plane.tools/plane/postgres:15.5-alpine
restart: unless-stopped
command: postgres -c 'max_connections=1000'
volumes:
- ${INSTALL_DIR}/data/db:/var/lib/postgresql/data
plane-redis:
<<: *app-env
image: registry.plane.tools/plane/redis:7.2.4-alpine
restart: unless-stopped
volumes:
- ${INSTALL_DIR}/data/redis:/data
plane-minio:
<<: *app-env
image: registry.plane.tools/plane/minio:latest
restart: unless-stopped
command: server /export --console-address ":9090"
volumes:
- ${INSTALL_DIR}/data/minio/uploads:/export
- ${INSTALL_DIR}/data/minio/data:/data
# Comment this if you already have a reverse proxy running
proxy:
<<: *proxy-env
image: registry.plane.tools/plane/caddy:latest
restart: unless-stopped
ports:
- ${LISTEN_HTTP_PORT:-80}:${LISTEN_HTTP_PORT:-80}
- ${LISTEN_HTTPS_PORT:-443}:${LISTEN_HTTPS_PORT:-443}
volumes:
- ${INSTALL_DIR}/Caddyfile:/etc/caddy/Caddyfile
- ${INSTALL_DIR}/caddy/config:/config
- ${INSTALL_DIR}/caddy/data:/data
depends_on:
- web
- api
- space

View File

@@ -0,0 +1,166 @@
# version: "3.8"
x-app-env: &app-env
environment:
- LISTEN_HTTP_PORT=${LISTEN_HTTP_PORT:-80}
- WEB_URL=${WEB_URL:-http://localhost}
- DEBUG=${DEBUG:-0}
- SENTRY_DSN=${SENTRY_DSN:-""}
- SENTRY_ENVIRONMENT=${SENTRY_ENVIRONMENT:-"production"}
- CORS_ALLOWED_ORIGINS=${CORS_ALLOWED_ORIGINS:-}
# Gunicorn Workers
- GUNICORN_WORKERS=${GUNICORN_WORKERS:-2}
#DB SETTINGS
- PGHOST=${PGHOST:-plane-db}
- PGDATABASE=${PGDATABASE:-plane}
- POSTGRES_USER=${POSTGRES_USER:-plane}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-plane}
- POSTGRES_DB=${POSTGRES_DB:-plane}
- PGDATA=${PGDATA:-/var/lib/postgresql/data}
- DATABASE_URL=${DATABASE_URL:-postgresql://plane:plane@plane-db/plane}
# REDIS SETTINGS
- REDIS_HOST=${REDIS_HOST:-plane-redis}
- REDIS_PORT=${REDIS_PORT:-6379}
- REDIS_URL=${REDIS_URL:-redis://plane-redis:6379/}
# Application secret
- SECRET_KEY=${SECRET_KEY:-60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5}
# DATA STORE SETTINGS
- USE_MINIO=${USE_MINIO:-1}
- AWS_REGION=${AWS_REGION:-""}
- AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-"access-key"}
- AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-"secret-key"}
- AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000}
- AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads}
- MINIO_ROOT_USER=${MINIO_ROOT_USER:-"access-key"}
- MINIO_ROOT_PASSWORD=${MINIO_ROOT_PASSWORD:-"secret-key"}
- BUCKET_NAME=${BUCKET_NAME:-uploads}
- FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880}
services:
admin:
<<: *app-env
image: registry.plane.tools/plane/admin-enterprise:${APP_RELEASE_VERSION}
restart: unless-stopped
command: node admin/server.js admin
deploy:
replicas: ${ADMIN_REPLICAS:-1}
depends_on:
- api
- web
web:
<<: *app-env
image: registry.plane.tools/plane/web-enterprise:${APP_RELEASE_VERSION}
pull_policy: if_not_present
restart: unless-stopped
command: node web/server.js web
deploy:
replicas: ${WEB_REPLICAS:-1}
depends_on:
- api
- worker
space:
<<: *app-env
image: registry.plane.tools/plane/space-enterprise:${APP_RELEASE_VERSION}
pull_policy: if_not_present
restart: unless-stopped
command: node space/server.js space
deploy:
replicas: ${SPACE_REPLICAS:-1}
depends_on:
- api
- worker
- web
api:
<<: *app-env
image: registry.plane.tools/plane/backend-enterprise:${APP_RELEASE_VERSION}
pull_policy: if_not_present
restart: unless-stopped
command: ./bin/docker-entrypoint-api-ee.sh
deploy:
replicas: ${API_REPLICAS:-1}
volumes:
- ${INSTALL_DIR}/logs/api:/code/plane/logs
depends_on:
- plane-db
- plane-redis
worker:
<<: *app-env
image: registry.plane.tools/plane/backend-enterprise:${APP_RELEASE_VERSION}
pull_policy: if_not_present
restart: unless-stopped
command: ./bin/docker-entrypoint-worker.sh
volumes:
- ${INSTALL_DIR}/logs/worker:/code/plane/logs
depends_on:
- api
- plane-db
- plane-redis
beat-worker:
<<: *app-env
image: registry.plane.tools/plane/backend-enterprise:${APP_RELEASE_VERSION}
pull_policy: if_not_present
restart: unless-stopped
command: ./bin/docker-entrypoint-beat.sh
volumes:
- ${INSTALL_DIR}/logs/beat-worker:/code/plane/logs
depends_on:
- api
- plane-db
- plane-redis
migrator:
<<: *app-env
image: registry.plane.tools/plane/backend-enterprise:${APP_RELEASE_VERSION}
pull_policy: if_not_present
restart: "no"
command: ./bin/docker-entrypoint-migrator.sh
volumes:
- ${INSTALL_DIR}/logs/migrator:/code/plane/logs
depends_on:
- plane-db
- plane-redis
plane-db:
<<: *app-env
image: registry.plane.tools/plane/postgres:15.5-alpine
pull_policy: if_not_present
restart: unless-stopped
command: postgres -c 'max_connections=1000'
volumes:
- ${INSTALL_DIR}/data/db:/var/lib/postgresql/data
plane-redis:
<<: *app-env
image: registry.plane.tools/plane/redis:7.2.4-alpine
pull_policy: if_not_present
restart: unless-stopped
volumes:
- ${INSTALL_DIR}/data/redis:/data
plane-minio:
<<: *app-env
image: registry.plane.tools/plane/minio:latest
pull_policy: if_not_present
restart: unless-stopped
command: server /export --console-address ":9090"
volumes:
- ${INSTALL_DIR}/data/minio/uploads:/export
- ${INSTALL_DIR}/data/minio/data:/data
# Comment this if you already have a reverse proxy running
proxy:
<<: *app-env
image: registry.plane.tools/plane/proxy-enterprise:${APP_RELEASE_VERSION}
pull_policy: if_not_present
restart: unless-stopped
ports:
- ${LISTEN_HTTP_PORT}:80
depends_on:
- web
- api
- space

View File

@@ -0,0 +1,65 @@
INSTALL_DIR=/opt/plane
DOMAIN_NAME=localhost
WEB_REPLICAS=1
SPACE_REPLICAS=1
ADMIN_REPLICAS=1
API_REPLICAS=1
LISTEN_HTTP_PORT=80
LISTEN_HTTPS_PORT=443
APP_PROTOCOL=http
# To generate an SSL certificate, set CERT_EMAIL and set APP_PROTOCOL to https
CERT_EMAIL=admin@example.com
CERT_ACME_CA=https://acme-v02.api.letsencrypt.org/directory
# if APP_PROTOCOL=http
# SITE_ADDRESS=http://[domain-name]:[listen-http-port]
# elif APP_PROTOCOL=https
# SITE_ADDRESS=[domain-name]:[listen-http-port]
# fi
SITE_ADDRESS=localhost:80
# For DNS Challenge based certificate generation, set the CERT_ACME_DNS
# CERT_ACME_DNS=acme_dns CERT_DNS_PROVIDER CERT_DNS_PROVIDER_API_KEY
CERT_ACME_DNS=
WEB_URL=http://localhost
DEBUG=0
SENTRY_DSN=
SENTRY_ENVIRONMENT=production
CORS_ALLOWED_ORIGINS=http://localhost
#DB SETTINGS
PGHOST=plane-db
PGDATABASE=plane
POSTGRES_USER=plane
POSTGRES_PASSWORD=plane
POSTGRES_DB=plane
PGDATA=/var/lib/postgresql/data
DATABASE_URL=
# REDIS SETTINGS
REDIS_HOST=plane-redis
REDIS_PORT=6379
REDIS_URL=
# Secret Key
SECRET_KEY=60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5
# DATA STORE SETTINGS
USE_MINIO=1
AWS_REGION=
AWS_ACCESS_KEY_ID=access-key
AWS_SECRET_ACCESS_KEY=secret-key
AWS_S3_ENDPOINT_URL=http://plane-minio:9000
AWS_S3_BUCKET_NAME=uploads
MINIO_ROOT_USER=access-key
MINIO_ROOT_PASSWORD=secret-key
BUCKET_NAME=uploads
FILE_SIZE_LIMIT=5242880
# Gunicorn Workers
GUNICORN_WORKERS=2

View File

@@ -52,6 +52,15 @@ services:
- plane-db
- plane-redis
mongodb:
image: "mongo"
restart: unless-stopped
volumes:
- "mongodb_data:/data/db"
environment:
- MONGO_INITDB_ROOT_USERNAME=${MONGO_INITDB_ROOT_USERNAME:-plane}
- MONGO_INITDB_ROOT_PASSWORD=${MONGO_INITDB_ROOT_PASSWORD:-plane}
worker:
container_name: bgworker
build:
@@ -154,3 +163,4 @@ volumes:
pgdata:
redisdata:
uploads:
mongodb_data:

85
generate_release_notes.sh Normal file
View File

@@ -0,0 +1,85 @@
#!/bin/bash
# Initialize temporary files for each category
FEATURES_FILE=$(mktemp)
IMPROVEMENTS_FILE=$(mktemp)
BUGS_FILE=$(mktemp)
OTHERS_FILE=$(mktemp)
FEATURES_COUNT=0
IMPROVEMENTS_COUNT=0
BUGS_COUNT=0
OTHERS_COUNT=0
# Check if there are any tags in the repository
if git describe --tags --abbrev=0 > /dev/null 2>&1; then
# Fetch all commits from the last tag to HEAD
COMMITS=$(git log $(git describe --tags --abbrev=0)..HEAD --pretty=format:"%s|%h")
else
# If no tags are found, list all commits
COMMITS=$(git log --pretty=format:"%s|%h")
fi
# Save IFS and set it to newline to handle commits correctly
OLD_IFS=$IFS
IFS=$'\n'
# Loop through each commit to categorize
for commit in $COMMITS; do
IFS="|" read -r commit_message hash <<< "$commit"
# Normalize commit message to handle case sensitivity
normalized_message=$(echo "$commit_message" | tr '[:upper:]' '[:lower:]')
# Skip commits that start with "merge" or "chore" or do not contain a PR number
if echo "$normalized_message" | grep -qE '^(merge|chore)'; then
continue
fi
# Extract PR number if present
PR_NUMBER=$(echo $commit_message | grep -o -E "#[0-9]+" || echo "")
if [[ -z "$PR_NUMBER" ]]; then
continue # Skip commits without a PR number
fi
# Format the commit message
CLEAN_MESSAGE=$(echo $commit_message | sed -E "s/#[0-9]+//; s/^(feat|refactor|fix|chore): //I; s/^([Ff]eat|[Rr]efactor|[Ff]ix|[Cc]hore) //I; s/^\[.*\] //; s/[:\-] / /; s/\(\) //")
CLEAN_MESSAGE="$(tr '[:lower:]' '[:upper:]' <<< ${CLEAN_MESSAGE:0:1})${CLEAN_MESSAGE:1}."
CLEAN_MESSAGE=$(echo $CLEAN_MESSAGE | sed 's/()//g') # Remove empty brackets
# Categorize and limit the number of commits under each heading
if [[ $FEATURES_COUNT -lt 30 && $normalized_message =~ ^feat ]]; then
echo "- $CLEAN_MESSAGE $PR_NUMBER" >> "$FEATURES_FILE"
((FEATURES_COUNT++))
elif [[ $IMPROVEMENTS_COUNT -lt 30 && $normalized_message =~ ^refactor ]]; then
echo "- $CLEAN_MESSAGE $PR_NUMBER" >> "$IMPROVEMENTS_FILE"
((IMPROVEMENTS_COUNT++))
elif [[ $BUGS_COUNT -lt 30 && $normalized_message =~ ^fix ]]; then
echo "- $CLEAN_MESSAGE $PR_NUMBER" >> "$BUGS_FILE"
((BUGS_COUNT++))
elif [[ $OTHERS_COUNT -lt 30 ]]; then
echo "- $CLEAN_MESSAGE $PR_NUMBER" >> "$OTHERS_FILE"
((OTHERS_COUNT++))
fi
done
# Restore IFS
IFS=$OLD_IFS
# Generate the release notes by concatenating the temporary files
{
echo '## What Changed'
echo "## Features"
cat "$FEATURES_FILE"
echo "## Improvements"
cat "$IMPROVEMENTS_FILE"
echo "## Bugs"
cat "$BUGS_FILE"
echo "## Others"
cat "$OTHERS_FILE"
} > RELEASE_NOTES.md
# Clean up temporary files
rm "$FEATURES_FILE" "$IMPROVEMENTS_FILE" "$BUGS_FILE" "$OTHERS_FILE"
echo "Release notes generated in RELEASE_NOTES.md"

View File

@@ -390,3 +390,7 @@ ul[data-type="taskList"] ul[data-type="taskList"] {
margin-top: 0;
}
/* end tailwind typography */
.ProseMirror .issue-embed img {
margin: 0 !important;
}

View File

@@ -14,9 +14,7 @@ export function CoreEditorProps(editorClassName: string): EditorProps {
// prevent default event listeners from firing when slash command is active
if (["ArrowUp", "ArrowDown", "Enter"].includes(event.key)) {
const slashCommand = document.querySelector("#slash-command");
if (slashCommand) {
return true;
}
if (slashCommand) return true;
}
},
},

View File

@@ -7,7 +7,7 @@ import { EditorRefApi, IMentionHighlight, IMentionSuggestion, TFileHandler, useE
// custom provider
import { CollaborationProvider } from "src/providers/collaboration-provider";
// extensions
import { DocumentEditorExtensions } from "src/ui/extensions";
import { DocumentEditorExtensions, TEmbedConfig } from "src/ui/extensions";
type DocumentEditorProps = {
id: string;
@@ -25,6 +25,7 @@ type DocumentEditorProps = {
placeholder?: string | ((isFocused: boolean, value: string) => string);
setHideDragHandleFunction: (hideDragHandlerFromDragDrop: () => void) => void;
tabIndex?: number;
embedHandler?: TEmbedConfig;
};
export const useDocumentEditor = ({
@@ -39,6 +40,7 @@ export const useDocumentEditor = ({
handleEditorReady,
mentionHandler,
placeholder,
embedHandler,
setHideDragHandleFunction,
}: DocumentEditorProps) => {
const provider = useMemo(
@@ -75,6 +77,7 @@ export const useDocumentEditor = ({
extensions: DocumentEditorExtensions({
uploadFile: fileHandler.upload,
setHideDragHandle: setHideDragHandleFunction,
issueEmbedConfig: embedHandler?.issue,
provider,
}),
placeholder,

View File

@@ -9,3 +9,5 @@ export { proseMirrorJSONToBinaryString, applyUpdates, mergeUpdates } from "src/u
export type { EditorRefApi, EditorReadOnlyRefApi, EditorMenuItem, EditorMenuItemNames } from "@plane/editor-core";
export type { IMarking } from "src/types/editor-types";
export type { TEmbedItem } from "src/ui/extensions/widgets/issue-embed/block/types";

View File

@@ -0,0 +1,79 @@
import Collaboration from "@tiptap/extension-collaboration";
import Placeholder from "@tiptap/extension-placeholder";
// plane imports
import { SlashCommand, DragAndDrop } from "@plane/editor-extensions";
import { UploadImage, ISlashCommandItem } from "@plane/editor-core";
// ui
import { LayersIcon } from "@plane/ui";
import { IssueEmbedSuggestions, IssueWidget, IssueListRenderer, TIssueEmbedConfig } from "src/ui/extensions";
import { CollaborationProvider } from "src/providers/collaboration-provider";
type TArguments = {
uploadFile: UploadImage;
setHideDragHandle?: (hideDragHandlerFromDragDrop: () => void) => void;
issueEmbedConfig?: TIssueEmbedConfig;
provider: CollaborationProvider;
};
export const DocumentEditorExtensions = (props: TArguments) => {
const { uploadFile, setHideDragHandle, issueEmbedConfig, provider } = props;
const additionalOptions: ISlashCommandItem[] = [
{
key: "issue_embed",
title: "Issue embed",
description: "Embed an issue from the project.",
searchTerms: ["issue", "link", "embed"],
icon: <LayersIcon className="h-3.5 w-3.5" />,
command: ({ editor, range }) => {
editor
.chain()
.focus()
.insertContentAt(
range,
"<p class='text-sm bg-gray-300 w-fit pl-3 pr-3 pt-1 pb-1 rounded shadow-sm'>#issue_</p>"
)
.run();
},
},
];
const extensions = [
SlashCommand(uploadFile, additionalOptions),
DragAndDrop(setHideDragHandle),
Placeholder.configure({
placeholder: ({ node }) => {
if (node.type.name === "heading") {
return `Heading ${node.attrs.level}`;
}
if (node.type.name === "image" || node.type.name === "table") {
return "";
}
return "Press '/' for commands...";
},
includeChildren: true,
}),
Collaboration.configure({
document: provider.document,
}),
];
if (issueEmbedConfig) {
extensions.push(
// @ts-expect-error resolve this
IssueWidget({
widgetCallback: issueEmbedConfig.widgetCallback,
}).configure({
issueEmbedConfig,
}),
IssueEmbedSuggestions.configure({
suggestion: {
render: () => IssueListRenderer(issueEmbedConfig.searchCallback),
},
})
);
}
return extensions;
};
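Worth noting in the factory above: the issue-embed node and suggestions extension are registered only when `issueEmbedConfig` is present, so an editor built without it ships just the slash command, drag handle, placeholder, and collaboration pieces. A minimal sketch of both call sites (the `provider` and `uploadFile` stubs are assumptions for the sketch):

```ts
declare const provider: CollaborationProvider; // a live Yjs-backed provider in the real app
declare const uploadFile: UploadImage; // e.g. (file: File) => Promise<string>

// Base configuration: no issue-embed extensions are registered.
const baseExtensions = DocumentEditorExtensions({ uploadFile, provider });

// Full configuration: also registers IssueWidget and IssueEmbedSuggestions.
const fullExtensions = DocumentEditorExtensions({
  uploadFile,
  provider,
  issueEmbedConfig: {
    searchCallback: async () => [], // stub: return matching TEmbedItem[]
    widgetCallback: () => null, // stub: return the embed's React node
  },
});
```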

View File

@@ -0,0 +1,2 @@
export * from "./widgets";
export * from "./extensions";

View File

@@ -1,21 +0,0 @@
import { IssueWidgetPlaceholder } from "src/ui/extensions/widgets/issue-embed-widget";
import { SlashCommand, DragAndDrop } from "@plane/editor-extensions";
import { UploadImage } from "@plane/editor-core";
import { CollaborationProvider } from "src/providers/collaboration-provider";
import Collaboration from "@tiptap/extension-collaboration";
type TArguments = {
uploadFile: UploadImage;
setHideDragHandle?: (hideDragHandlerFromDragDrop: () => void) => void;
provider: CollaborationProvider;
};
export const DocumentEditorExtensions = ({ uploadFile, setHideDragHandle, provider }: TArguments) => [
SlashCommand(uploadFile),
DragAndDrop(setHideDragHandle),
IssueWidgetPlaceholder(),
Collaboration.configure({
document: provider.document,
}),
];

View File

@@ -0,0 +1 @@
export * from "./issue-embed";

View File

@@ -1,54 +0,0 @@
import { Editor, Range } from "@tiptap/react";
import { IssueEmbedSuggestions } from "src/ui/extensions/widgets/issue-embed-suggestion-list/issue-suggestion-extension";
import { getIssueSuggestionItems } from "src/ui/extensions/widgets/issue-embed-suggestion-list/issue-suggestion-items";
import { IssueListRenderer } from "src/ui/extensions/widgets/issue-embed-suggestion-list/issue-suggestion-renderer";
import { v4 as uuidv4 } from "uuid";
export type CommandProps = {
editor: Editor;
range: Range;
};
export interface IIssueListSuggestion {
title: string;
priority: "high" | "low" | "medium" | "urgent";
identifier: string;
state: "Cancelled" | "In Progress" | "Todo" | "Done" | "Backlog";
command: ({ editor, range }: CommandProps) => void;
}
export const IssueSuggestions = (suggestions: any[]) => {
const mappedSuggestions: IIssueListSuggestion[] = suggestions.map((suggestion): IIssueListSuggestion => {
const transactionId = uuidv4();
return {
title: suggestion.name,
priority: suggestion.priority.toString(),
identifier: `${suggestion.project_detail.identifier}-${suggestion.sequence_id}`,
state: suggestion.state_detail && suggestion.state_detail.name ? suggestion.state_detail.name : "Todo",
command: ({ editor, range }) => {
editor
.chain()
.focus()
.insertContentAt(range, {
type: "issue-embed-component",
attrs: {
entity_identifier: suggestion.id,
id: transactionId,
title: suggestion.name,
project_identifier: suggestion.project_detail.identifier,
sequence_id: suggestion.sequence_id,
entity_name: "issue",
},
})
.run();
},
};
});
return IssueEmbedSuggestions.configure({
suggestion: {
items: getIssueSuggestionItems(mappedSuggestions),
render: IssueListRenderer,
},
});
};

View File

@@ -1,15 +0,0 @@
import { IIssueListSuggestion } from "src/ui/extensions/widgets/issue-embed-suggestion-list";
export const getIssueSuggestionItems =
(issueSuggestions: Array<IIssueListSuggestion>) =>
({ query }: { query: string }) => {
const search = query.toLowerCase();
const filteredSuggestions = issueSuggestions.filter(
(item) =>
item.title.toLowerCase().includes(search) ||
item.identifier.toLowerCase().includes(search) ||
item.priority.toLowerCase().includes(search)
);
return filteredSuggestions;
};

View File

@@ -1,254 +0,0 @@
import { cn } from "@plane/editor-core";
import { Editor } from "@tiptap/core";
import tippy from "tippy.js";
import { ReactRenderer } from "@tiptap/react";
import { useCallback, useEffect, useLayoutEffect, useRef, useState } from "react";
import { PriorityIcon } from "@plane/ui";
const updateScrollView = (container: HTMLElement, item: HTMLElement) => {
const containerHeight = container.offsetHeight;
const itemHeight = item ? item.offsetHeight : 0;
const top = item.offsetTop;
const bottom = top + itemHeight;
if (top < container.scrollTop) {
// container.scrollTop = top - containerHeight;
item.scrollIntoView({
behavior: "smooth",
block: "center",
});
} else if (bottom > containerHeight + container.scrollTop) {
// container.scrollTop = bottom - containerHeight;
item.scrollIntoView({
behavior: "smooth",
block: "center",
});
}
};
interface IssueSuggestionProps {
title: string;
priority: "high" | "low" | "medium" | "urgent" | "none";
state: "Cancelled" | "In Progress" | "Todo" | "Done" | "Backlog";
identifier: string;
}
const IssueSuggestionList = ({
items,
command,
editor,
}: {
items: IssueSuggestionProps[];
command: any;
editor: Editor;
range: any;
}) => {
const [selectedIndex, setSelectedIndex] = useState(0);
const [currentSection, setCurrentSection] = useState<string>("Backlog");
const sections = ["Backlog", "In Progress", "Todo", "Done", "Cancelled"];
const [displayedItems, setDisplayedItems] = useState<{
[key: string]: IssueSuggestionProps[];
}>({});
const [displayedTotalLength, setDisplayedTotalLength] = useState(0);
const commandListContainer = useRef<HTMLDivElement>(null);
useEffect(() => {
const newDisplayedItems: { [key: string]: IssueSuggestionProps[] } = {};
let totalLength = 0;
sections.forEach((section) => {
newDisplayedItems[section] = items.filter((item) => item.state === section).slice(0, 5);
totalLength += newDisplayedItems[section].length;
});
setDisplayedTotalLength(totalLength);
setDisplayedItems(newDisplayedItems);
}, [items]);
const selectItem = useCallback(
(section: string, index: number) => {
const item = displayedItems[section][index];
if (item) {
command(item);
}
},
[command, displayedItems, currentSection]
);
useEffect(() => {
const navigationKeys = ["ArrowUp", "ArrowDown", "Enter", "Tab"];
const onKeyDown = (e: KeyboardEvent) => {
if (navigationKeys.includes(e.key)) {
// if (editor.isFocused) {
// editor.chain().blur();
// commandListContainer.current?.focus();
// }
if (e.key === "ArrowUp") {
setSelectedIndex(
(selectedIndex + displayedItems[currentSection].length - 1) % displayedItems[currentSection].length
);
return true;
}
if (e.key === "ArrowDown") {
const nextIndex = (selectedIndex + 1) % displayedItems[currentSection].length;
setSelectedIndex(nextIndex);
if (nextIndex === 4) {
const nextItems = items
.filter((item) => item.state === currentSection)
.slice(displayedItems[currentSection].length, displayedItems[currentSection].length + 5);
setDisplayedItems((prevItems) => ({
...prevItems,
[currentSection]: [...prevItems[currentSection], ...nextItems],
}));
}
return true;
}
if (e.key === "Enter") {
selectItem(currentSection, selectedIndex);
return true;
}
if (e.key === "Tab") {
const currentSectionIndex = sections.indexOf(currentSection);
const nextSectionIndex = (currentSectionIndex + 1) % sections.length;
setCurrentSection(sections[nextSectionIndex]);
setSelectedIndex(0);
return true;
}
return false;
} else if (e.key === "Escape") {
if (!editor.isFocused) {
editor.chain().focus();
}
}
};
document.addEventListener("keydown", onKeyDown);
return () => {
document.removeEventListener("keydown", onKeyDown);
};
}, [displayedItems, selectedIndex, setSelectedIndex, selectItem, currentSection]);
useLayoutEffect(() => {
const container = commandListContainer?.current;
if (container) {
const sectionContainer = container?.querySelector(`#${currentSection}-container`) as HTMLDivElement;
if (sectionContainer) {
updateScrollView(container, sectionContainer);
}
const sectionScrollContainer = container?.querySelector(`#${currentSection}`) as HTMLElement;
const item = sectionScrollContainer?.children[selectedIndex] as HTMLElement;
if (item && sectionScrollContainer) {
updateScrollView(sectionScrollContainer, item);
}
}
}, [selectedIndex, currentSection]);
return displayedTotalLength > 0 ? (
<div
id="issue-list-container"
ref={commandListContainer}
className=" fixed z-[10] max-h-80 w-96 overflow-y-auto overflow-x-hidden rounded-md border border-custom-border-100 bg-custom-background-100 px-1 shadow-custom-shadow-xs transition-all"
>
{sections.map((section) => {
const sectionItems = displayedItems[section];
return (
sectionItems &&
sectionItems.length > 0 && (
<div className={"flex h-full w-full flex-col"} key={`${section}-container`} id={`${section}-container`}>
<h6
className={
"sticky top-0 z-[10] bg-custom-background-100 px-2 py-1 text-xs font-medium text-custom-text-400"
}
>
{section}
</h6>
<div key={section} id={section} className={"max-h-[140px] overflow-x-hidden overflow-y-scroll"}>
{sectionItems.map((item: IssueSuggestionProps, index: number) => (
<button
className={cn(
`flex w-full items-center space-x-2 rounded-md px-2 py-1 text-left text-sm text-custom-text-200 hover:bg-custom-primary-100/5 hover:text-custom-text-100`,
{
"bg-custom-primary-100/5 text-custom-text-100":
section === currentSection && index === selectedIndex,
}
)}
key={item.identifier}
onClick={() => selectItem(section, index)}
>
<h5 className="whitespace-nowrap text-xs text-custom-text-300">{item.identifier}</h5>
<PriorityIcon priority={item.priority} />
<div className="w-full truncate">
<p className="flex-grow w-full truncate text-xs">{item.title}</p>
</div>
</button>
))}
</div>
</div>
)
);
})}
</div>
) : null;
};
export const IssueListRenderer = () => {
let component: ReactRenderer | null = null;
let popup: any | null = null;
return {
onStart: (props: { editor: Editor; clientRect?: (() => DOMRect | null) | null }) => {
const container = document.querySelector(".frame-renderer") as HTMLElement;
component = new ReactRenderer(IssueSuggestionList, {
props,
// @ts-ignore
editor: props.editor,
});
// @ts-ignore
popup = tippy(".frame-renderer", {
flipbehavior: ["bottom", "top"],
appendTo: () => document.querySelector(".frame-renderer") as HTMLElement,
flip: true,
flipOnUpdate: true,
getReferenceClientRect: props.clientRect,
content: component.element,
showOnCreate: true,
interactive: true,
trigger: "manual",
placement: "bottom-start",
});
container.addEventListener("scroll", () => {
popup?.[0].destroy();
});
},
onUpdate: (props: { editor: Editor; clientRect?: (() => DOMRect | null) | null }) => {
component?.updateProps(props);
popup &&
popup[0].setProps({
getReferenceClientRect: props.clientRect,
});
},
onKeyDown: (props: { event: KeyboardEvent }) => {
if (props.event.key === "Escape") {
popup?.[0].hide();
return true;
}
const navigationKeys = ["ArrowUp", "ArrowDown", "Enter", "Tab"];
if (navigationKeys.includes(props.event.key)) {
// @ts-ignore
component?.ref?.onKeyDown(props);
return true;
}
return false;
},
onExit: (e) => {
const container = document.querySelector(".frame-renderer") as HTMLElement;
if (container) {
container.removeEventListener("scroll", () => {});
}
popup?.[0].destroy();
setTimeout(() => {
component?.destroy();
}, 300);
},
};
};

View File

@@ -1,3 +0,0 @@
import { IssueWidget } from "src/ui/extensions/widgets/issue-embed-widget/issue-widget-node";
export const IssueWidgetPlaceholder = () => IssueWidget.configure({});

View File

@@ -1,33 +0,0 @@
// @ts-nocheck
import { Button } from "@plane/ui";
import { NodeViewWrapper } from "@tiptap/react";
import { Crown } from "lucide-react";
export const IssueWidgetCard = (props) => (
<NodeViewWrapper className="issue-embed-component m-2">
<div
className={`${
props.selected ? "border-custom-primary-200 border-[2px]" : ""
} w-full h-[100px] cursor-pointer space-y-2 rounded-md border-[0.5px] border-custom-border-200 shadow-custom-shadow-2xs`}
>
<h5 className="h-[20%] text-xs text-custom-text-300 p-2">
{props.node.attrs.project_identifier}-{props.node.attrs.sequence_id}
</h5>
<div className="relative h-[71%]">
<div className="h-full backdrop-filter backdrop-blur-[30px] bg-custom-background-80 bg-opacity-30 flex items-center w-full justify-between gap-5 mt-2.5 pl-4 pr-5 py-3 max-md:max-w-full max-md:flex-wrap relative">
<div className="flex gap-2 items-center">
<div className="rounded">
<Crown className="m-2" size={16} color="#FFBA18" />
</div>
<div className="text-custom-text text-sm">
Embed and access issues in pages seamlessly, upgrade to plane pro now.
</div>
</div>
<a href="https://plane.so/pricing" target="_blank" rel="noreferrer">
<Button>Upgrade</Button>
</a>
</div>
</div>
</div>
</NodeViewWrapper>
);

View File

@@ -1,63 +0,0 @@
import { mergeAttributes, Node } from "@tiptap/core";
import { IssueWidgetCard } from "src/ui/extensions/widgets/issue-embed-widget/issue-widget-card";
import { ReactNodeViewRenderer } from "@tiptap/react";
export const IssueWidget = Node.create({
name: "issue-embed-component",
group: "block",
atom: true,
addAttributes() {
return {
id: {
default: null,
},
class: {
default: "w-[600px]",
},
title: {
default: null,
},
entity_name: {
default: null,
},
entity_identifier: {
default: null,
},
project_identifier: {
default: null,
},
sequence_id: {
default: null,
},
};
},
addNodeView() {
return ReactNodeViewRenderer((props: Object) => <IssueWidgetCard {...props} />);
},
parseHTML() {
return [
{
tag: "issue-embed-component",
getAttrs: (node: string | HTMLElement) => {
if (typeof node === "string") {
return null;
}
return {
id: node.getAttribute("id") || "",
title: node.getAttribute("title") || "",
entity_name: node.getAttribute("entity_name") || "",
entity_identifier: node.getAttribute("entity_identifier") || "",
project_identifier: node.getAttribute("project_identifier") || "",
sequence_id: node.getAttribute("sequence_id") || "",
};
},
},
];
},
renderHTML({ HTMLAttributes }) {
return ["issue-embed-component", mergeAttributes(HTMLAttributes)];
},
});

View File

@@ -0,0 +1,2 @@
export * from "./issue-widget-node";
export * from "./types";

View File

@@ -0,0 +1,46 @@
import { mergeAttributes, Node } from "@tiptap/core";
import { ReactNodeViewRenderer, NodeViewWrapper } from "@tiptap/react";
type Props = {
widgetCallback: (issueId: string) => React.ReactNode;
};
export const IssueWidget = (props: Props) =>
Node.create({
name: "issue-embed-component",
group: "block",
atom: true,
selectable: true,
draggable: true,
addAttributes() {
return {
entity_identifier: {
default: undefined,
},
id: {
default: undefined,
},
entity_name: {
default: undefined,
},
};
},
addNodeView() {
return ReactNodeViewRenderer((issueProps: any) => (
<NodeViewWrapper>{props.widgetCallback(issueProps.node.attrs.entity_identifier)}</NodeViewWrapper>
));
},
parseHTML() {
return [
{
tag: "issue-embed-component",
},
];
},
renderHTML({ HTMLAttributes }) {
return ["issue-embed-component", mergeAttributes(HTMLAttributes)];
},
});
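Unlike the deleted placeholder card above, this node owns no UI of its own: everything inside the `NodeViewWrapper` comes from `widgetCallback`, so the host decides what an embed looks like. A hedged sketch (`PageIssueEmbed` is hypothetical):

```tsx
import React from "react";

// Hypothetical host-side renderer for a persisted embed.
const PageIssueEmbed = ({ issueId }: { issueId: string }) => (
  <div className="issue-embed">Loading issue {issueId}…</div>
);

// The node reads entity_identifier off the <issue-embed-component> tag and
// hands it straight to the callback.
const issueEmbedNode = IssueWidget({
  widgetCallback: (issueId) => <PageIssueEmbed issueId={issueId} />,
});
```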

View File

@@ -0,0 +1,19 @@
export type TEmbedConfig = {
issue?: TIssueEmbedConfig;
};
export type TReadOnlyEmbedConfig = {
issue?: Omit<TIssueEmbedConfig, "searchCallback">;
};
export type TIssueEmbedConfig = {
searchCallback: (searchQuery: string) => Promise<TEmbedItem[]>;
widgetCallback: (issueId: string) => React.ReactNode;
};
export type TEmbedItem = {
id: string;
title: string;
subTitle: string;
icon: React.ReactNode;
};
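`TEmbedItem` is the row shape the suggestions dropdown consumes, so a `searchCallback` is just a mapper from the app's search results into it. A sketch under that assumption (`fetchIssues` and `AppIssue` are hypothetical):

```tsx
import React from "react";
import { LayersIcon } from "@plane/ui"; // same icon the slash-command item uses

type AppIssue = { id: string; name: string; project_identifier: string; sequence_id: number };
declare function fetchIssues(query: string): Promise<AppIssue[]>;

const toEmbedItem = (issue: AppIssue): TEmbedItem => ({
  id: issue.id,
  title: issue.name,
  subTitle: `${issue.project_identifier}-${issue.sequence_id}`,
  icon: <LayersIcon className="h-3.5 w-3.5" />,
});

// Satisfies TIssueEmbedConfig["searchCallback"].
const searchCallback = async (searchQuery: string): Promise<TEmbedItem[]> =>
  (await fetchIssues(searchQuery)).map(toEmbedItem);
```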

View File

@@ -0,0 +1,2 @@
export * from "./block";
export * from "./suggestions-list";

View File

@@ -0,0 +1,2 @@
export * from "./issue-suggestion-extension";
export * from "./issue-suggestion-renderer";

Some files were not shown because too many files have changed in this diff.