fix: merge conflicts

This commit is contained in:
sriram veeraghanta
2024-10-22 14:16:00 +05:30
1564 changed files with 107445 additions and 3790 deletions

View File

@@ -26,9 +26,10 @@ AWS_S3_BUCKET_NAME="uploads"
FILE_SIZE_LIMIT=5242880
# GPT settings
SILO_BASE_URL=
OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
OPENAI_API_KEY="sk-" # deprecated
GPT_ENGINE="gpt-3.5-turbo" # deprecated
GPT_ENGINE="gpt-4o-mini" # deprecated
# Settings related to Docker
DOCKERIZED=1 # deprecated
@@ -38,3 +39,11 @@ USE_MINIO=1
# Nginx Configuration
NGINX_PORT=80
# Imports Config
SILO_BASE_URL=
MONGO_DB_URL="mongodb://plane-mongodb:27017/"
SILO_DB=silo
SILO_DB_URL=postgresql://plane:plane@plane-db/silo

168
.github/actions/build-push-ee/action.yml vendored Normal file
View File

@@ -0,0 +1,168 @@
name: "Build and Push Docker Image"
description: "Reusable action for building and pushing Docker images"
inputs:
docker-username:
description: "The Dockerhub username"
required: true
dockerhub-token:
description: "The Dockerhub Token"
required: true
# Harbor Options
harbor-push:
description: "Flag to push to Harbor"
required: false
default: "false"
harbor-username:
description: "The Harbor username"
required: false
harbor-token:
description: "The Harbor token"
required: false
harbor-registry:
description: "The Harbor registry"
required: false
default: "registry.plane.tools"
harbor-project:
description: "The Harbor project"
required: false
# Docker Image Options
docker-image-owner:
description: "The owner of the Docker image"
required: true
docker-image-name:
description: "The name of the Docker image"
required: true
build-context:
description: "The build context"
required: true
default: "."
dockerfile-path:
description: "The path to the Dockerfile"
required: true
build-args:
description: "The build arguments"
required: false
default: ""
# Buildx Options
buildx-driver:
description: "Buildx driver"
required: true
default: "docker-container"
buildx-version:
description: "Buildx version"
required: true
default: "latest"
buildx-platforms:
description: "Buildx platforms"
required: true
default: "linux/amd64"
buildx-endpoint:
description: "Buildx endpoint"
required: true
default: "default"
# Release Build Options
build-release:
description: "Flag to publish release"
required: false
default: "false"
build-prerelease:
description: "Flag to publish prerelease"
required: false
default: "false"
release-version:
description: "The release version"
required: false
default: "latest"
runs:
using: "composite"
steps:
- name: Set Docker Tag
shell: bash
env:
IMG_OWNER: ${{ inputs.docker-image-owner }}
IMG_NAME: ${{ inputs.docker-image-name }}
HARBOR_PUSH: ${{ inputs.harbor-push }}
HARBOR_REGISTRY: ${{ inputs.harbor-registry }}
HARBOR_PROJECT: ${{ inputs.harbor-project }}
BUILD_RELEASE: ${{ inputs.build-release }}
IS_PRERELEASE: ${{ inputs.build-prerelease }}
REL_VERSION: ${{ inputs.release-version }}
run: |
FLAT_BRANCH_VERSION=$(echo "${{ github.ref_name }}" | sed 's/[^a-zA-Z0-9.-]//g')
if [ "${{ env.BUILD_RELEASE }}" == "true" ]; then
semver_regex="^v([0-9]+)\.([0-9]+)\.([0-9]+)(-[a-zA-Z0-9]+(-[a-zA-Z0-9]+)*)?$"
if [[ ! "${{ env.REL_VERSION }}" =~ $semver_regex ]]; then  # quote LHS: unquoted empty REL_VERSION is a bash syntax error
echo "Invalid Release Version Format : ${{ env.REL_VERSION }}"
echo "Please provide a valid SemVer version"
echo "e.g. v1.2.3 or v1.2.3-alpha-1"
echo "Exiting the build process"
exit 1 # Exit with status 1 to fail the step
fi
TAG=${{ env.IMG_OWNER }}/${{ env.IMG_NAME }}:${{ env.REL_VERSION }}
if [ "${{ env.HARBOR_PUSH }}" == "true" ]; then
TAG=${TAG},${{ env.HARBOR_REGISTRY }}/${{ env.HARBOR_PROJECT }}/${{ env.IMG_NAME }}:${{ env.REL_VERSION }}
fi
if [ "${{ env.IS_PRERELEASE }}" != "true" ]; then
TAG=${TAG},${{ env.IMG_OWNER }}/${{ env.IMG_NAME }}:stable
if [ "${{ env.HARBOR_PUSH }}" == "true" ]; then
TAG=${TAG},${{ env.HARBOR_REGISTRY }}/${{ env.HARBOR_PROJECT }}/${{ env.IMG_NAME }}:stable
fi
fi
elif [ "$TARGET_BRANCH" == "master" ]; then  # NOTE(review): TARGET_BRANCH is not set in this action's env block; the `env` expression context in a composite action does not include workflow-level env, but the exported shell variable does — confirm the calling workflow always exports TARGET_BRANCH
TAG=${{ env.IMG_OWNER }}/${{ env.IMG_NAME }}:latest
if [ "${{ env.HARBOR_PUSH }}" == "true" ]; then
TAG=${TAG},${{ env.HARBOR_REGISTRY }}/${{ env.HARBOR_PROJECT }}/${{ env.IMG_NAME }}:latest
fi
else
TAG=${{ env.IMG_OWNER }}/${{ env.IMG_NAME }}:${FLAT_BRANCH_VERSION}
if [ "${{ env.HARBOR_PUSH }}" == "true" ]; then
TAG=${TAG},${{ env.HARBOR_REGISTRY }}/${{ env.HARBOR_PROJECT }}/${{ env.IMG_NAME }}:${FLAT_BRANCH_VERSION}
fi
fi
echo "DOCKER_TAGS=${TAG}" >> $GITHUB_ENV
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ inputs.docker-username }}
password: ${{ inputs.dockerhub-token}}
- name: Login to Harbor
if: ${{ inputs.harbor-push == 'true' }}
uses: docker/login-action@v3
with:
username: ${{ inputs.harbor-username }}
password: ${{ inputs.harbor-token }}
registry: ${{ inputs.harbor-registry }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver: ${{ inputs.buildx-driver }}
version: ${{ inputs.buildx-version }}
endpoint: ${{ inputs.buildx-endpoint }}
- name: Check out the repo
uses: actions/checkout@v4
- name: Build and Push Docker Image
uses: docker/build-push-action@v5.1.0
with:
context: ${{ inputs.build-context }}
file: ${{ inputs.dockerfile-path }}
platforms: ${{ inputs.buildx-platforms }}
tags: ${{ env.DOCKER_TAGS }}
push: true
build-args: ${{ inputs.build-args }}
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ inputs.docker-username }}
DOCKER_PASSWORD: ${{ inputs.dockerhub-token }}

11
.github/dependabot.yml vendored Normal file
View File

@@ -0,0 +1,11 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
version: 2
updates:
- package-ecosystem: "github-actions" # TODO(review): template default "" is invalid and makes dependabot fail — confirm the intended ecosystem
directory: "/" # Location of package manifests
schedule:
interval: "weekly"

View File

@@ -0,0 +1,204 @@
name: Branch Build AIO
on:
workflow_dispatch:
inputs:
full:
description: 'Run full build'
type: boolean
required: false
default: true
slim:
description: 'Run slim build'
type: boolean
required: false
default: true
base_tag_name:
description: 'Base Tag Name'
required: false
default: ''
release:
types: [released, prereleased]
env:
TARGET_BRANCH: ${{ github.ref_name || github.event.release.target_commitish }}
IS_PRERELEASE: ${{ github.event.release.prerelease }}
FULL_BUILD_INPUT: ${{ github.event.inputs.full }}
SLIM_BUILD_INPUT: ${{ github.event.inputs.slim }}
jobs:
branch_build_setup:
name: Build Setup
runs-on: ubuntu-latest
outputs:
gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
aio_base_tag: ${{ steps.set_env_variables.outputs.AIO_BASE_TAG }}
do_full_build: ${{ steps.set_env_variables.outputs.DO_FULL_BUILD }}
do_slim_build: ${{ steps.set_env_variables.outputs.DO_SLIM_BUILD }}
steps:
- id: set_env_variables
name: Set Environment Variables
run: |
if [ "${{ github.event_name }}" == "release" ] && [ "${{ env.IS_PRERELEASE }}" != "true" ]; then
echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT
echo "AIO_BASE_TAG=latest" >> $GITHUB_OUTPUT
else
echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
if [ "${{ github.event.inputs.base_tag_name }}" != "" ]; then
echo "AIO_BASE_TAG=${{ github.event.inputs.base_tag_name }}" >> $GITHUB_OUTPUT
elif [ "${{ env.TARGET_BRANCH }}" == "preview" ]; then
echo "AIO_BASE_TAG=preview" >> $GITHUB_OUTPUT
else
echo "AIO_BASE_TAG=develop" >> $GITHUB_OUTPUT
fi
fi
echo "TARGET_BRANCH=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT
if [ "${{ env.FULL_BUILD_INPUT }}" == "true" ] || [ "${{github.event_name}}" == "push" ] || [ "${{github.event_name}}" == "release" ]; then
echo "DO_FULL_BUILD=true" >> $GITHUB_OUTPUT
else
echo "DO_FULL_BUILD=false" >> $GITHUB_OUTPUT
fi
if [ "${{ env.SLIM_BUILD_INPUT }}" == "true" ] || [ "${{github.event_name}}" == "push" ] || [ "${{github.event_name}}" == "release" ]; then
echo "DO_SLIM_BUILD=true" >> $GITHUB_OUTPUT
else
echo "DO_SLIM_BUILD=false" >> $GITHUB_OUTPUT
fi
- id: checkout_files
name: Checkout Files
uses: actions/checkout@v4
full_build_push:
if: ${{ needs.branch_build_setup.outputs.do_full_build == 'true' }}
runs-on: ubuntu-20.04
needs: [branch_build_setup]
env:
BUILD_TYPE: full
AIO_BASE_TAG: ${{ needs.branch_build_setup.outputs.aio_base_tag }}
AIO_IMAGE_TAGS: makeplane/plane-aio-enterprise:full-${{ needs.branch_build_setup.outputs.gh_branch_name }}
TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
steps:
- name: Set Docker Tag
run: |
if [ "${{ github.event_name }}" == "release" ]; then
TAG=makeplane/plane-aio-enterprise:${{env.BUILD_TYPE}}-stable,makeplane/plane-aio-enterprise:${{env.BUILD_TYPE}}-${{ github.event.release.tag_name }}
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
TAG=makeplane/plane-aio-enterprise:${{env.BUILD_TYPE}}-latest
else
TAG=${{ env.AIO_IMAGE_TAGS }}
fi
echo "AIO_IMAGE_TAGS=${TAG}" >> $GITHUB_ENV
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver: ${{ env.BUILDX_DRIVER }}
version: ${{ env.BUILDX_VERSION }}
endpoint: ${{ env.BUILDX_ENDPOINT }}
- name: Check out the repo
uses: actions/checkout@v4
- name: Build and Push to Docker Hub
uses: docker/build-push-action@v5.1.0
with:
context: .
file: ./aio/Dockerfile-app
platforms: ${{ env.BUILDX_PLATFORMS }}
tags: ${{ env.AIO_IMAGE_TAGS }}
push: true
build-args: |
BUILD_TAG=${{ env.AIO_BASE_TAG }}
BUILD_TYPE=${{env.BUILD_TYPE}}
cache-from: type=gha
cache-to: type=gha,mode=max
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
slim_build_push:
if: ${{ needs.branch_build_setup.outputs.do_slim_build == 'true' }}
runs-on: ubuntu-20.04
needs: [branch_build_setup]
env:
BUILD_TYPE: slim
AIO_BASE_TAG: ${{ needs.branch_build_setup.outputs.aio_base_tag }}
AIO_IMAGE_TAGS: makeplane/plane-aio-enterprise:slim-${{ needs.branch_build_setup.outputs.gh_branch_name }}
TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
steps:
- name: Set Docker Tag
run: |
if [ "${{ github.event_name }}" == "release" ]; then
TAG=makeplane/plane-aio-enterprise:${{env.BUILD_TYPE}}-stable,makeplane/plane-aio-enterprise:${{env.BUILD_TYPE}}-${{ github.event.release.tag_name }}
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
TAG=makeplane/plane-aio-enterprise:${{env.BUILD_TYPE}}-latest
else
TAG=${{ env.AIO_IMAGE_TAGS }}
fi
echo "AIO_IMAGE_TAGS=${TAG}" >> $GITHUB_ENV
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver: ${{ env.BUILDX_DRIVER }}
version: ${{ env.BUILDX_VERSION }}
endpoint: ${{ env.BUILDX_ENDPOINT }}
- name: Check out the repo
uses: actions/checkout@v4
- name: Build and Push to Docker Hub
uses: docker/build-push-action@v5.1.0
with:
context: .
file: ./aio/Dockerfile-app
platforms: ${{ env.BUILDX_PLATFORMS }}
tags: ${{ env.AIO_IMAGE_TAGS }}
push: true
build-args: |
BUILD_TAG=${{ env.AIO_BASE_TAG }}
BUILD_TYPE=${{env.BUILD_TYPE}}
cache-from: type=gha
cache-to: type=gha,mode=max
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

532
.github/workflows/build-branch-ee.yml vendored Normal file
View File

@@ -0,0 +1,532 @@
name: Branch Build Enterprise
on:
workflow_dispatch:
inputs:
build_type:
description: "Type of build to run"
required: true
type: choice
default: "Build"
options:
- "Build"
- "Release"
releaseVersion:
description: "Release Version"
type: string
default: v0.0.0
isPrerelease:
description: "Is Pre-release"
type: boolean
default: false
required: true
arm64:
description: "Build for ARM64 architecture"
required: false
default: false
type: boolean
push:
branches:
- master
env:
TARGET_BRANCH: ${{ github.ref_name }}
ARM64_BUILD: ${{ github.event.inputs.arm64 }}
BUILD_TYPE: ${{ github.event.inputs.build_type }}
RELEASE_VERSION: ${{ github.event.inputs.releaseVersion }}
IS_PRERELEASE: ${{ github.event.inputs.isPrerelease }}
jobs:
branch_build_setup:
name: Build Setup
runs-on: ubuntu-20.04
outputs:
gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
build_web: ${{ steps.changed_files.outputs.web_any_changed }}
build_admin: ${{ steps.changed_files.outputs.admin_any_changed }}
build_space: ${{ steps.changed_files.outputs.space_any_changed }}
build_live: ${{ steps.changed_files.outputs.live_any_changed }}
build_apiserver: ${{ steps.changed_files.outputs.apiserver_any_changed }}
build_proxy: ${{ steps.changed_files.outputs.proxy_any_changed }}
build_monitor: ${{ steps.changed_files.outputs.monitor_any_changed }}
build_silo: ${{ steps.changed_files.outputs.silo_any_changed }}
artifact_upload_to_s3: ${{ steps.set_env_variables.outputs.artifact_upload_to_s3 }}
artifact_s3_suffix: ${{ steps.set_env_variables.outputs.artifact_s3_suffix }}
dh_img_web: ${{ steps.set_env_variables.outputs.DH_IMG_WEB }}
dh_img_space: ${{ steps.set_env_variables.outputs.DH_IMG_SPACE }}
dh_img_admin: ${{ steps.set_env_variables.outputs.DH_IMG_ADMIN }}
dh_img_live: ${{ steps.set_env_variables.outputs.DH_IMG_LIVE }}
dh_img_backend: ${{ steps.set_env_variables.outputs.DH_IMG_BACKEND }}
dh_img_proxy: ${{ steps.set_env_variables.outputs.DH_IMG_PROXY }}
dh_img_monitor: ${{ steps.set_env_variables.outputs.DH_IMG_MONITOR }}
dh_img_silo: ${{ steps.set_env_variables.outputs.DH_IMG_SILO }}
harbor_push: ${{ steps.set_env_variables.outputs.HARBOR_PUSH }}
build_type: ${{steps.set_env_variables.outputs.BUILD_TYPE}}
build_release: ${{ steps.set_env_variables.outputs.BUILD_RELEASE }}
build_prerelease: ${{ steps.set_env_variables.outputs.BUILD_PRERELEASE }}
release_version: ${{ steps.set_env_variables.outputs.RELEASE_VERSION }}
steps:
- id: set_env_variables
name: Set Environment Variables
run: |
if [ "${{ env.ARM64_BUILD }}" == "true" ] || ([ "${{ env.BUILD_TYPE }}" == "Release" ] && [ "${{ env.IS_PRERELEASE }}" != "true" ]); then
echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT
else
echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
fi
BR_NAME=$( echo "${{ env.TARGET_BRANCH }}" |sed 's/[^a-zA-Z0-9.-]//g')
echo "TARGET_BRANCH=$BR_NAME" >> $GITHUB_OUTPUT
echo "DH_IMG_WEB=web-enterprise" >> $GITHUB_OUTPUT
echo "DH_IMG_SPACE=space-enterprise" >> $GITHUB_OUTPUT
echo "DH_IMG_ADMIN=admin-enterprise" >> $GITHUB_OUTPUT
echo "DH_IMG_LIVE=live-enterprise" >> $GITHUB_OUTPUT
echo "DH_IMG_BACKEND=backend-enterprise" >> $GITHUB_OUTPUT
echo "DH_IMG_PROXY=proxy-enterprise" >> $GITHUB_OUTPUT
echo "DH_IMG_MONITOR=monitor-enterprise" >> $GITHUB_OUTPUT
echo "DH_IMG_SILO=silo-enterprise" >> $GITHUB_OUTPUT
echo "BUILD_TYPE=${{env.BUILD_TYPE}}" >> $GITHUB_OUTPUT
BUILD_RELEASE=false
BUILD_PRERELEASE=false
RELVERSION="latest"
HARBOR_PUSH=false
if [ "${{ env.BUILD_TYPE }}" == "Release" ]; then
FLAT_RELEASE_VERSION=$(echo "${{ env.RELEASE_VERSION }}" | sed 's/[^a-zA-Z0-9.-]//g')
echo "FLAT_RELEASE_VERSION=${FLAT_RELEASE_VERSION}" >> $GITHUB_OUTPUT
HARBOR_PUSH=true
semver_regex="^v([0-9]+)\.([0-9]+)\.([0-9]+)(-[a-zA-Z0-9]+(-[a-zA-Z0-9]+)*)?$"
if [[ ! $FLAT_RELEASE_VERSION =~ $semver_regex ]]; then
echo "Invalid Release Version Format : $FLAT_RELEASE_VERSION"
echo "Please provide a valid SemVer version"
echo "e.g. v1.2.3 or v1.2.3-alpha-1"
echo "Exiting the build process"
exit 1 # Exit with status 1 to fail the step
fi
BUILD_RELEASE=true
RELVERSION=$FLAT_RELEASE_VERSION
if [ "${{ env.IS_PRERELEASE }}" == "true" ]; then
BUILD_PRERELEASE=true
fi
fi
echo "BUILD_RELEASE=${BUILD_RELEASE}" >> $GITHUB_OUTPUT
echo "BUILD_PRERELEASE=${BUILD_PRERELEASE}" >> $GITHUB_OUTPUT
echo "RELEASE_VERSION=${RELVERSION}" >> $GITHUB_OUTPUT
echo "HARBOR_PUSH=${HARBOR_PUSH}" >> $GITHUB_OUTPUT
if [ "${{ env.BUILD_TYPE }}" == "Release" ]; then
echo "artifact_upload_to_s3=true" >> $GITHUB_OUTPUT
echo "artifact_s3_suffix=${{ env.RELEASE_VERSION }}" >> $GITHUB_OUTPUT
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
echo "artifact_upload_to_s3=true" >> $GITHUB_OUTPUT
echo "artifact_s3_suffix=latest" >> $GITHUB_OUTPUT
elif [ "${{ env.TARGET_BRANCH }}" == "preview" ] || [ "${{ env.TARGET_BRANCH }}" == "develop" ] || [ "${{ env.TARGET_BRANCH }}" == "uat" ]; then
echo "artifact_upload_to_s3=true" >> $GITHUB_OUTPUT
echo "artifact_s3_suffix=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT
else
echo "artifact_upload_to_s3=false" >> $GITHUB_OUTPUT
echo "artifact_s3_suffix=$BR_NAME" >> $GITHUB_OUTPUT
fi
- id: checkout_files
name: Checkout Files
uses: actions/checkout@v4
- name: Get changed files
id: changed_files
uses: tj-actions/changed-files@v42
with:
files_yaml: |
apiserver:
- apiserver/**
proxy:
- nginx/**
admin:
- admin/**
- packages/**
- "package.json"
- "yarn.lock"
- "tsconfig.json"
- "turbo.json"
space:
- space/**
- packages/**
- "package.json"
- "yarn.lock"
- "tsconfig.json"
- "turbo.json"
web:
- web/**
- packages/**
- "package.json"
- "yarn.lock"
- "tsconfig.json"
- "turbo.json"
silo:
- silo/**
- packages/**
- 'package.json'
- 'yarn.lock'
- 'tsconfig.json'
- 'turbo.json'
live:
- live/**
- packages/**
- 'package.json'
- 'yarn.lock'
- 'tsconfig.json'
- 'turbo.json'
monitor:
- monitor/**
branch_build_push_admin:
if: ${{ needs.branch_build_setup.outputs.build_admin == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
name: Build-Push Admin Docker Image
runs-on: ubuntu-20.04
needs: [branch_build_setup]
steps:
- id: checkout_files
name: Checkout Files
uses: actions/checkout@v4
- name: Admin Build and Push
uses: ./.github/actions/build-push-ee
with:
build-release: ${{ needs.branch_build_setup.outputs.build_release }}
build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
release-version: ${{ needs.branch_build_setup.outputs.release_version }}
docker-username: ${{ secrets.DOCKERHUB_USERNAME }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
harbor-push: ${{ needs.branch_build_setup.outputs.harbor_push }}
harbor-username: ${{ secrets.HARBOR_USERNAME }}
harbor-token: ${{ secrets.HARBOR_TOKEN }}
harbor-registry: ${{ vars.HARBOR_REGISTRY }}
harbor-project: ${{ vars.HARBOR_PROJECT }}
docker-image-owner: makeplane
docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_admin }}
build-context: .
dockerfile-path: ./admin/Dockerfile.admin
buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
branch_build_push_web:
if: ${{ needs.branch_build_setup.outputs.build_web == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
name: Build-Push Web Docker Image
runs-on: ubuntu-20.04
needs: [branch_build_setup]
steps:
- id: checkout_files
name: Checkout Files
uses: actions/checkout@v4
- name: Web Build and Push
uses: ./.github/actions/build-push-ee
with:
build-release: ${{ needs.branch_build_setup.outputs.build_release }}
build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
release-version: ${{ needs.branch_build_setup.outputs.release_version }}
docker-username: ${{ secrets.DOCKERHUB_USERNAME }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
harbor-push: ${{ needs.branch_build_setup.outputs.harbor_push }}
harbor-username: ${{ secrets.HARBOR_USERNAME }}
harbor-token: ${{ secrets.HARBOR_TOKEN }}
harbor-registry: ${{ vars.HARBOR_REGISTRY }}
harbor-project: ${{ vars.HARBOR_PROJECT }}
docker-image-owner: makeplane
docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_web }}
build-context: .
dockerfile-path: ./web/Dockerfile.web
buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
branch_build_push_space:
if: ${{ needs.branch_build_setup.outputs.build_space == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
name: Build-Push Space Docker Image
runs-on: ubuntu-20.04
needs: [branch_build_setup]
steps:
- id: checkout_files
name: Checkout Files
uses: actions/checkout@v4
- name: Space Build and Push
uses: ./.github/actions/build-push-ee
with:
build-release: ${{ needs.branch_build_setup.outputs.build_release }}
build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
release-version: ${{ needs.branch_build_setup.outputs.release_version }}
docker-username: ${{ secrets.DOCKERHUB_USERNAME }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
harbor-push: ${{ needs.branch_build_setup.outputs.harbor_push }}
harbor-username: ${{ secrets.HARBOR_USERNAME }}
harbor-token: ${{ secrets.HARBOR_TOKEN }}
harbor-registry: ${{ vars.HARBOR_REGISTRY }}
harbor-project: ${{ vars.HARBOR_PROJECT }}
docker-image-owner: makeplane
docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_space }}
build-context: .
dockerfile-path: ./space/Dockerfile.space
buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
branch_build_push_live:
if: ${{ needs.branch_build_setup.outputs.build_live == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
name: Build-Push Live Collaboration Docker Image
runs-on: ubuntu-20.04
needs: [branch_build_setup]
steps:
- id: checkout_files
name: Checkout Files
uses: actions/checkout@v4
- name: Live Build and Push
uses: ./.github/actions/build-push-ee
with:
build-release: ${{ needs.branch_build_setup.outputs.build_release }}
build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
release-version: ${{ needs.branch_build_setup.outputs.release_version }}
docker-username: ${{ secrets.DOCKERHUB_USERNAME }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
harbor-push: ${{ needs.branch_build_setup.outputs.harbor_push }}
harbor-username: ${{ secrets.HARBOR_USERNAME }}
harbor-token: ${{ secrets.HARBOR_TOKEN }}
harbor-registry: ${{ vars.HARBOR_REGISTRY }}
harbor-project: ${{ vars.HARBOR_PROJECT }}
docker-image-owner: makeplane
docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_live }}
build-context: .
dockerfile-path: ./live/Dockerfile.live
buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
branch_build_push_silo:
if: ${{ needs.branch_build_setup.outputs.build_silo == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
name: Build-Push Silo Docker Image
runs-on: ubuntu-20.04
needs: [branch_build_setup]
steps:
- id: checkout_files
name: Checkout Files
uses: actions/checkout@v4
- name: Silo Build and Push
uses: ./.github/actions/build-push-ee
with:
build-release: ${{ needs.branch_build_setup.outputs.build_release }}
build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
release-version: ${{ needs.branch_build_setup.outputs.release_version }}
docker-username: ${{ secrets.DOCKERHUB_USERNAME }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
harbor-push: ${{ needs.branch_build_setup.outputs.harbor_push }}
harbor-username: ${{ secrets.HARBOR_USERNAME }}
harbor-token: ${{ secrets.HARBOR_TOKEN }}
harbor-registry: ${{ vars.HARBOR_REGISTRY }}
harbor-project: ${{ vars.HARBOR_PROJECT }}
docker-image-owner: makeplane
docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_silo }}
build-context: .
dockerfile-path: ./silo/Dockerfile.silo
buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
branch_build_push_apiserver:
if: ${{ needs.branch_build_setup.outputs.build_apiserver == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
name: Build-Push API Server Docker Image
runs-on: ubuntu-20.04
needs: [branch_build_setup]
steps:
- id: checkout_files
name: Checkout Files
uses: actions/checkout@v4
- name: Backend Build and Push
uses: ./.github/actions/build-push-ee
with:
build-release: ${{ needs.branch_build_setup.outputs.build_release }}
build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
release-version: ${{ needs.branch_build_setup.outputs.release_version }}
docker-username: ${{ secrets.DOCKERHUB_USERNAME }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
harbor-push: ${{ needs.branch_build_setup.outputs.harbor_push }}
harbor-username: ${{ secrets.HARBOR_USERNAME }}
harbor-token: ${{ secrets.HARBOR_TOKEN }}
harbor-registry: ${{ vars.HARBOR_REGISTRY }}
harbor-project: ${{ vars.HARBOR_PROJECT }}
docker-image-owner: makeplane
docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_backend }}
build-context: ./apiserver
dockerfile-path: ./apiserver/Dockerfile.api
buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
branch_build_push_proxy:
if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
name: Build-Push Proxy Docker Image
runs-on: ubuntu-20.04
needs: [branch_build_setup]
steps:
- id: checkout_files
name: Checkout Files
uses: actions/checkout@v4
- name: Proxy Build and Push
uses: ./.github/actions/build-push-ee
with:
build-release: ${{ needs.branch_build_setup.outputs.build_release }}
build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
release-version: ${{ needs.branch_build_setup.outputs.release_version }}
docker-username: ${{ secrets.DOCKERHUB_USERNAME }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
harbor-push: ${{ needs.branch_build_setup.outputs.harbor_push }}
harbor-username: ${{ secrets.HARBOR_USERNAME }}
harbor-token: ${{ secrets.HARBOR_TOKEN }}
harbor-registry: ${{ vars.HARBOR_REGISTRY }}
harbor-project: ${{ vars.HARBOR_PROJECT }}
docker-image-owner: makeplane
docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_proxy }}
build-context: ./nginx
dockerfile-path: ./nginx/Dockerfile
buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
branch_build_push_monitor:
if: ${{ needs.branch_build_setup.outputs.build_monitor == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
name: Build-Push Monitor Docker Image
runs-on: ubuntu-20.04
needs: [branch_build_setup]
steps:
- id: checkout_files
name: Checkout Files
uses: actions/checkout@v4
- name: Generate Keypair
run: |
if [ "${{ env.TARGET_BRANCH }}" == "develop" ] || [ "${{ env.TARGET_BRANCH }}" == "preview" ] || [ "${{ env.TARGET_BRANCH }}" == "uat" ]; then
echo "${{ secrets.DEFAULT_PRIME_PRIVATE_KEY }}" > private_key.pem
else
openssl genrsa -out private_key.pem 2048
fi
openssl rsa -in private_key.pem -pubout -out public_key.pem
cat public_key.pem
# Generating the private key env for the generated keys
PRIVATE_KEY=$(cat private_key.pem | base64 -w 0)
echo "PRIVATE_KEY=${PRIVATE_KEY}" >> $GITHUB_ENV
- name: Monitor Build and Push
uses: ./.github/actions/build-push-ee
with:
build-release: ${{ needs.branch_build_setup.outputs.build_release }}
build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
release-version: ${{ needs.branch_build_setup.outputs.release_version }}
docker-username: ${{ secrets.DOCKERHUB_USERNAME }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
harbor-push: ${{ needs.branch_build_setup.outputs.harbor_push }}
harbor-username: ${{ secrets.HARBOR_USERNAME }}
harbor-token: ${{ secrets.HARBOR_TOKEN }}
harbor-registry: ${{ vars.HARBOR_REGISTRY }}
harbor-project: ${{ vars.HARBOR_PROJECT }}
docker-image-owner: makeplane
docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_monitor }}
build-context: ./monitor
dockerfile-path: ./monitor/Dockerfile
buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
build-args: |
PRIVATE_KEY=${{ env.PRIVATE_KEY }}
upload_artifacts_s3:
if: ${{ needs.branch_build_setup.outputs.artifact_upload_to_s3 == 'true' }}
name: Upload artifacts to S3 Bucket
runs-on: ubuntu-20.04
needs: [branch_build_setup]
container:
image: docker:20.10.7
credentials:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
env:
ARTIFACT_SUFFIX: ${{ needs.branch_build_setup.outputs.artifact_s3_suffix }}
AWS_ACCESS_KEY_ID: ${{ secrets.SELF_HOST_BUCKET_ACCESS_KEY }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.SELF_HOST_BUCKET_SECRET_KEY }}
steps:
- id: checkout_files
name: Checkout Files
uses: actions/checkout@v4
- name: Upload artifacts
run: |
apk update
apk add --no-cache aws-cli
mkdir -p ~/${{ env.ARTIFACT_SUFFIX }}
cp deploy/cli-install/variables.env ~/${{ env.ARTIFACT_SUFFIX }}/variables.env
cp deploy/cli-install/Caddyfile ~/${{ env.ARTIFACT_SUFFIX }}/Caddyfile
sed -e 's@${APP_RELEASE_VERSION}@'${{ env.ARTIFACT_SUFFIX }}'@' deploy/cli-install/docker-compose.yml > ~/${{ env.ARTIFACT_SUFFIX }}/docker-compose.yml
sed -e 's@${APP_RELEASE_VERSION}@'${{ env.ARTIFACT_SUFFIX }}'@' deploy/cli-install/docker-compose-caddy.yml > ~/${{ env.ARTIFACT_SUFFIX }}/docker-compose-caddy.yml
aws s3 cp ~/${{ env.ARTIFACT_SUFFIX }} s3://${{ vars.SELF_HOST_BUCKET_NAME }}/plane-enterprise/${{ env.ARTIFACT_SUFFIX }} --recursive
publish_release:
if: ${{ needs.branch_build_setup.outputs.build_type == 'Release' }}
name: Build Release
runs-on: ubuntu-20.04
needs:
[
branch_build_setup,
branch_build_push_admin,
branch_build_push_web,
branch_build_push_space,
branch_build_push_live,
branch_build_push_apiserver,
branch_build_push_proxy,
branch_build_push_monitor,
branch_build_push_silo,
upload_artifacts_s3,
]
env:
REL_VERSION: ${{ needs.branch_build_setup.outputs.release_version }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Create Release
id: create_release
uses: softprops/action-gh-release@v2.0.8
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # This token is provided by Actions, you do not need to create your own token
with:
tag_name: ${{ env.REL_VERSION }}
name: ${{ env.REL_VERSION }}
draft: false
prerelease: ${{ env.IS_PRERELEASE }}
generate_release_notes: true
files: |
${{ github.workspace }}/deploy/cli-install/variables.env
${{ github.workspace }}/deploy/cli-install/Caddyfile
${{ github.workspace }}/deploy/cli-install/docker-compose.yml
${{ github.workspace }}/deploy/cli-install/docker-compose-caddy.yml

View File

@@ -0,0 +1,162 @@
name: Build and Lint on Pull Request EE
on:
workflow_dispatch:
issue_comment:
types: [created]
jobs:
get-changed-files:
if: github.event.issue.pull_request != '' && github.event.comment.body == 'build-test-pr'
runs-on: ubuntu-latest
outputs:
apiserver_changed: ${{ steps.changed-files.outputs.apiserver_any_changed }}
admin_changed: ${{ steps.changed-files.outputs.admin_any_changed }}
space_changed: ${{ steps.changed-files.outputs.space_any_changed }}
web_changed: ${{ steps.changed-files.outputs.web_any_changed }}
monitor_changed: ${{ steps.changed-files.outputs.monitor_any_changed }}
steps:
- uses: actions/checkout@v4
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@v44
with:
files_yaml: |
apiserver:
- apiserver/**
admin:
- admin/**
- packages/**
- 'package.json'
- 'yarn.lock'
- 'tsconfig.json'
- 'turbo.json'
space:
- space/**
- packages/**
- 'package.json'
- 'yarn.lock'
- 'tsconfig.json'
- 'turbo.json'
web:
- web/**
- packages/**
- 'package.json'
- 'yarn.lock'
- 'tsconfig.json'
- 'turbo.json'
monitor:
- monitor/**
# Lint the Django API server with Ruff whenever apiserver/ files change.
lint-apiserver:
  needs: get-changed-files
  runs-on: ubuntu-latest
  if: needs.get-changed-files.outputs.apiserver_changed == 'true'
  steps:
    - uses: actions/checkout@v4
    - name: Set up Python
      uses: actions/setup-python@v5
      with:
        python-version: "3.x" # latest stable 3.x; pin a minor version for fully reproducible runs
    - name: Install Ruff
      # Step was previously labelled "Install Pylint" but it installs Ruff.
      run: python -m pip install ruff
    - name: Install Apiserver Dependencies
      run: cd apiserver && pip install -r requirements.txt
    - name: Lint apiserver
      # --fix removed: in CI the auto-applied fixes are discarded with the
      # runner, and fixable violations would pass silently instead of failing
      # the check. Report-only mode makes the lint gate meaningful.
      run: ruff check apiserver
lint-admin:
needs: get-changed-files
if: needs.get-changed-files.outputs.admin_changed == 'true'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 18.x
- run: yarn install
- run: yarn lint --filter=admin
lint-space:
needs: get-changed-files
if: needs.get-changed-files.outputs.space_changed == 'true'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 18.x
- run: yarn install
- run: yarn lint --filter=space
lint-web:
needs: get-changed-files
if: needs.get-changed-files.outputs.web_changed == 'true'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 18.x
- run: yarn install
- run: yarn lint --filter=web
test-monitor:
needs: get-changed-files
if: needs.get-changed-files.outputs.monitor_changed == 'true'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
with:
go-version: "1.22.2"
- run: cd ./monitor && make test
build-admin:
needs: lint-admin
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 18.x
- run: yarn install
- run: yarn build --filter=admin
build-space:
needs: lint-space
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 18.x
- run: yarn install
- run: yarn build --filter=space
build-web:
needs: lint-web
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 18.x
- run: yarn install
- run: yarn build --filter=web
build-monitor:
needs: test-monitor
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
with:
go-version: "1.22.2"
- run: cd ./monitor && make build

70
.github/workflows/create-release.yml vendored Normal file
View File

@@ -0,0 +1,70 @@
# Manually-triggered workflow that tags the current commit and publishes a
# GitHub release with generated release notes.
name: Manual Release Workflow
on:
  workflow_dispatch:
    inputs:
      release_tag:
        description: 'Release Tag (e.g., v0.16-canary-1)'
        required: true
      prerelease:
        description: 'Pre-Release'
        required: true
        default: true
        type: boolean
      draft:
        description: 'Draft'
        required: true
        default: true
        type: boolean
permissions:
  contents: write
jobs:
  create-release:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0 # Necessary to fetch all history for tags
      - name: Set up Git
        run: |
          git config user.name "github-actions"
          git config user.email "github-actions@github.com"
      - name: Check for the Prerelease
        # This workflow is triggered by workflow_dispatch, so the flag lives
        # in github.event.inputs; github.event.release does not exist here
        # and the previous expression always printed an empty string.
        run: |
          echo ${{ github.event.inputs.prerelease }}
      - name: Generate Release Notes
        id: generate_notes
        run: |
          bash ./generate_release_notes.sh
          # Directly use the content of RELEASE_NOTES.md for the release body
          RELEASE_NOTES=$(cat RELEASE_NOTES.md)
          echo "RELEASE_NOTES<<EOF" >> $GITHUB_ENV
          echo "$RELEASE_NOTES" >> $GITHUB_ENV
          echo "EOF" >> $GITHUB_ENV
      - name: Create Tag
        run: |
          git tag ${{ github.event.inputs.release_tag }}
          git push origin ${{ github.event.inputs.release_tag }}
      - name: Create GitHub Release
        uses: softprops/action-gh-release@v1
        with:
          tag_name: ${{ github.event.inputs.release_tag }}
          body_path: RELEASE_NOTES.md
          draft: ${{ github.event.inputs.draft }}
          prerelease: ${{ github.event.inputs.prerelease }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

55
.github/workflows/sync-community.yml vendored Normal file
View File

@@ -0,0 +1,55 @@
# Mirrors a branch from the public community repository into this repository
# by hard-resetting a throwaway sync branch and opening (or refreshing) a PR.
name: Sync from Community Repo
on:
  # schedule:
  #   - cron: "*/30 * * * *" # Runs every 30 minutes
  workflow_dispatch:
    inputs:
      source_branch:
        description: "Source branch in Community repo"
        required: true
        default: "preview"
      target_branch:
        description: "Target branch in Enterprise repo"
        required: true
        default: "preview"
jobs:
  sync-from-community-repo:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout enterprise repository
        uses: actions/checkout@v2
        with:
          fetch-depth: 0
      - name: Set branch names
        run: |
          echo "SOURCE_BRANCH=${{ github.event.inputs.source_branch || 'preview' }}" >> $GITHUB_ENV
          echo "TARGET_BRANCH=${{ github.event.inputs.target_branch || 'preview' }}" >> $GITHUB_ENV
          echo "SYNC_BRANCH=sync-${{ github.run_id }}" >> $GITHUB_ENV
      - name: Create sync branch
        run: git checkout -b ${{ env.SYNC_BRANCH }}
      - name: Fetch from community repository
        run: |
          git config user.name github-actions
          git config user.email github-actions@github.com
          git remote add community https://github.com/makeplane/plane.git
          # "git remote add" only records the remote; without an explicit
          # fetch, community/<branch> does not exist and the reset fails.
          git fetch community ${{ env.SOURCE_BRANCH }}
          git reset --hard community/${{ env.SOURCE_BRANCH }}
      - name: Create Pull Request
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          # The head branch must exist on origin before "gh pr create" can
          # reference it; the branch name is unique per run, so a plain push
          # is safe.
          git push origin ${{ env.SYNC_BRANCH }}
          PR_TITLE="Sync changes from community repo"
          EXISTING_PR=$(gh pr list --base ${{ env.TARGET_BRANCH }} --head ${{ env.SYNC_BRANCH }} --json number --jq '.[0].number')
          if [ -z "$EXISTING_PR" ]; then
            pr_url=$(gh pr create --base ${{ env.TARGET_BRANCH }} --head ${{ env.SYNC_BRANCH }} --title "$PR_TITLE" --body "This PR syncs changes from the community repository's ${{ env.SOURCE_BRANCH }} branch.")
            echo "New Pull Request created: $pr_url"
          else
            echo "Pull Request already exists with number: $EXISTING_PR"
            gh pr edit $EXISTING_PR --title "$PR_TITLE" --body "This PR syncs changes from the community repository's ${{ env.SOURCE_BRANCH }} branch. (Updated)"
            echo "Existing Pull Request updated"
          fi

6
.gitignore vendored
View File

@@ -82,6 +82,12 @@ tmp/
dist
.temp/
deploy/selfhost/plane-app/
## Storybook
*storybook.log
output.css
# Monitor
monitor/prime.key
monitor/prime.key.pub
monitor.db

View File

@@ -27,7 +27,7 @@ FILE_SIZE_LIMIT=5242880
# GPT settings
OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
OPENAI_API_KEY="sk-" # deprecated
GPT_ENGINE="gpt-3.5-turbo" # deprecated
GPT_ENGINE="gpt-4o-mini" # deprecated
# Settings related to Docker
DOCKERIZED=1 # deprecated
# set to 1 If using the pre-configured minio setup

View File

@@ -49,7 +49,7 @@ export const InstanceAIForm: FC<IInstanceAIForm> = (props) => {
</a>
</>
),
placeholder: "gpt-3.5-turbo",
placeholder: "gpt-4o-mini",
error: Boolean(errors.GPT_ENGINE),
required: false,
},

View File

@@ -0,0 +1,244 @@
import { FC, useState } from "react";
import Link from "next/link";
import { useForm } from "react-hook-form";
// types
import { IFormattedInstanceConfiguration, TInstanceOIDCAuthenticationConfigurationKeys } from "@plane/types";
// ui
import { Button, TOAST_TYPE, getButtonStyling, setToast } from "@plane/ui";
// components
import {
ConfirmDiscardModal,
ControllerInput,
TControllerInputFormField,
CopyField,
TCopyField,
CodeBlock,
} from "@/components/common";
// helpers
import { cn } from "@/helpers/common.helper";
// hooks
import { useInstance } from "@/hooks/store";
type Props = {
config: IFormattedInstanceConfiguration;
};
type OIDCConfigFormValues = Record<TInstanceOIDCAuthenticationConfigurationKeys, string>;
/**
 * Admin form for configuring OIDC-based authentication.
 *
 * The left column collects IdP-provided values (client ID/secret, endpoint
 * URLs); the right column displays Plane-generated URIs the admin copies into
 * the IdP. Saving persists via the instance store and re-seeds the form with
 * the server-confirmed values so `isDirty` resets correctly.
 */
export const InstanceOIDCConfigForm: FC<Props> = (props) => {
  const { config } = props;
  // states
  const [isDiscardChangesModalOpen, setIsDiscardChangesModalOpen] = useState(false);
  // store hooks
  const { updateInstanceConfigurations } = useInstance();
  // form data
  const {
    handleSubmit,
    control,
    reset,
    formState: { errors, isDirty, isSubmitting },
  } = useForm<OIDCConfigFormValues>({
    defaultValues: {
      OIDC_CLIENT_ID: config["OIDC_CLIENT_ID"],
      OIDC_CLIENT_SECRET: config["OIDC_CLIENT_SECRET"],
      OIDC_TOKEN_URL: config["OIDC_TOKEN_URL"],
      OIDC_USERINFO_URL: config["OIDC_USERINFO_URL"],
      OIDC_AUTHORIZE_URL: config["OIDC_AUTHORIZE_URL"],
      OIDC_LOGOUT_URL: config["OIDC_LOGOUT_URL"],
      OIDC_PROVIDER_NAME: config["OIDC_PROVIDER_NAME"],
    },
  });

  // Guarded for SSR: window is unavailable during server rendering.
  const originURL = typeof window !== "undefined" ? window.location.origin : "";

  // Input fields for the IdP-provided values, rendered in order.
  const OIDC_FORM_FIELDS: TControllerInputFormField[] = [
    {
      key: "OIDC_CLIENT_ID",
      type: "text",
      label: "Client ID",
      description: "A unique ID for this Plane app that you register on your IdP",
      placeholder: "abc123xyz789",
      error: Boolean(errors.OIDC_CLIENT_ID),
      required: true,
    },
    {
      key: "OIDC_CLIENT_SECRET",
      type: "password",
      label: "Client secret",
      description: "The secret key that authenticates this Plane app to your IdP",
      placeholder: "s3cr3tK3y123!",
      error: Boolean(errors.OIDC_CLIENT_SECRET),
      required: true,
    },
    {
      key: "OIDC_AUTHORIZE_URL",
      type: "text",
      label: "Authorize URL",
      description: (
        <>
          {/* "button" appended: the sentence previously ended mid-phrase
              (cf. the parallel SAML SSO URL description). */}
          The URL that brings up your IdP{"'"}s authentication screen when your users click the{" "}
          <CodeBlock>{"Continue with"}</CodeBlock> button
        </>
      ),
      placeholder: "https://example.com/",
      error: Boolean(errors.OIDC_AUTHORIZE_URL),
      required: true,
    },
    {
      key: "OIDC_TOKEN_URL",
      type: "text",
      label: "Token URL",
      description: "The URL that talks to the IdP and persists user authentication on Plane",
      placeholder: "https://example.com/oauth/token",
      error: Boolean(errors.OIDC_TOKEN_URL),
      required: true,
    },
    {
      key: "OIDC_USERINFO_URL",
      type: "text",
      label: "Users' info URL",
      description: "The URL that fetches your users' info from your IdP",
      placeholder: "https://example.com/userinfo",
      error: Boolean(errors.OIDC_USERINFO_URL),
      required: true,
    },
    {
      key: "OIDC_LOGOUT_URL",
      type: "text",
      label: "Logout URL",
      description: "Optional field that controls where your users go after they log out of Plane",
      placeholder: "https://example.com/logout",
      error: Boolean(errors.OIDC_LOGOUT_URL),
      required: false,
    },
    {
      key: "OIDC_PROVIDER_NAME",
      type: "text",
      label: "IdP's name",
      description: (
        <>
          Optional field for the name that your users see on the <CodeBlock>Continue with</CodeBlock> button
        </>
      ),
      placeholder: "Okta",
      error: Boolean(errors.OIDC_PROVIDER_NAME),
      required: false,
    },
  ];

  // Plane-generated URIs the admin copies into the IdP's configuration.
  const OIDC_SERVICE_DETAILS: TCopyField[] = [
    {
      key: "Origin_URI",
      label: "Origin URI",
      url: `${originURL}/auth/oidc/`,
      description:
        "We will generate this for this Plane app. Add this as a trusted origin on your IdP's corresponding field.",
    },
    {
      key: "Callback_URI",
      label: "Callback URI",
      url: `${originURL}/auth/oidc/callback/`,
      description: (
        <>
          {/* Typo fix: "you.Add" -> "you. Add" (missing space after the period). */}
          We will generate this for you. Add this in the <CodeBlock darkerShade>Sign-in redirect URI</CodeBlock> field
          of your IdP.
        </>
      ),
    },
    {
      key: "Logout_URI",
      label: "Logout URI",
      url: `${originURL}/auth/oidc/logout/`,
      description: (
        <>
          We will generate this for you. Add this in the <CodeBlock darkerShade>Logout redirect URI</CodeBlock> field
          of your IdP.
        </>
      ),
    },
  ];

  // Persist the form, then reset it with the server-confirmed values so the
  // dirty state clears and the UI reflects what was actually stored.
  const onSubmit = async (formData: OIDCConfigFormValues) => {
    const payload: Partial<OIDCConfigFormValues> = { ...formData };
    await updateInstanceConfigurations(payload)
      .then((response = []) => {
        setToast({
          type: TOAST_TYPE.SUCCESS,
          title: "Done!",
          message: "Your OIDC-based authentication is configured. You should test it now.",
        });
        reset({
          OIDC_CLIENT_ID: response.find((item) => item.key === "OIDC_CLIENT_ID")?.value,
          OIDC_CLIENT_SECRET: response.find((item) => item.key === "OIDC_CLIENT_SECRET")?.value,
          OIDC_AUTHORIZE_URL: response.find((item) => item.key === "OIDC_AUTHORIZE_URL")?.value,
          OIDC_TOKEN_URL: response.find((item) => item.key === "OIDC_TOKEN_URL")?.value,
          OIDC_USERINFO_URL: response.find((item) => item.key === "OIDC_USERINFO_URL")?.value,
          OIDC_LOGOUT_URL: response.find((item) => item.key === "OIDC_LOGOUT_URL")?.value,
          OIDC_PROVIDER_NAME: response.find((item) => item.key === "OIDC_PROVIDER_NAME")?.value,
        });
      })
      .catch((err) => console.error(err));
  };

  // Intercept "Go back" while the form has unsaved edits and ask to confirm.
  const handleGoBack = (e: React.MouseEvent<HTMLAnchorElement, MouseEvent>) => {
    if (isDirty) {
      e.preventDefault();
      setIsDiscardChangesModalOpen(true);
    }
  };

  return (
    <>
      <ConfirmDiscardModal
        isOpen={isDiscardChangesModalOpen}
        onDiscardHref="/authentication"
        handleClose={() => setIsDiscardChangesModalOpen(false)}
      />
      <div className="flex flex-col gap-8">
        <div className="grid grid-cols-2 gap-x-12 gap-y-8 w-full">
          <div className="flex flex-col gap-y-4 col-span-2 md:col-span-1 pt-1">
            <div className="pt-2.5 text-xl font-medium">IdP-provided details for Plane</div>
            {OIDC_FORM_FIELDS.map((field) => (
              <ControllerInput
                key={field.key}
                control={control}
                type={field.type}
                name={field.key}
                label={field.label}
                description={field.description}
                placeholder={field.placeholder}
                error={field.error}
                required={field.required}
              />
            ))}
            <div className="flex flex-col gap-1 pt-4">
              <div className="flex items-center gap-4">
                <Button variant="primary" onClick={handleSubmit(onSubmit)} loading={isSubmitting} disabled={!isDirty}>
                  {isSubmitting ? "Saving..." : "Save changes"}
                </Button>
                <Link
                  href="/authentication"
                  className={cn(getButtonStyling("link-neutral", "md"), "font-medium")}
                  onClick={handleGoBack}
                >
                  Go back
                </Link>
              </div>
            </div>
          </div>
          <div className="col-span-2 md:col-span-1">
            <div className="flex flex-col gap-y-4 px-6 pt-1.5 pb-4 bg-custom-background-80/60 rounded-lg">
              <div className="pt-2 text-xl font-medium">Plane-provided details for your IdP</div>
              {OIDC_SERVICE_DETAILS.map((field) => (
                <CopyField key={field.key} label={field.label} url={field.url} description={field.description} />
              ))}
            </div>
          </div>
        </div>
      </div>
    </>
  );
};

View File

@@ -0,0 +1,120 @@
"use client";
import { useState } from "react";
import { observer } from "mobx-react-lite";
import Image from "next/image";
import useSWR from "swr";
// ui
import { Loader, ToggleSwitch, setPromiseToast } from "@plane/ui";
// components
import { AuthenticationMethodCard } from "@/components/authentication";
import { PageHeader } from "@/components/common";
// hooks
import { useInstance } from "@/hooks/store";
// icons
import OIDCLogo from "/public/logos/oidc-logo.svg";
// plane admin hooks
import { useInstanceFlag } from "@/plane-admin/hooks/store/use-instance-flag";
// local components
import { InstanceOIDCConfigForm } from "./form";
/**
 * God Mode page for enabling and configuring OIDC authentication.
 *
 * Shows an upsell notice when the OIDC_SAML_AUTH instance flag is off;
 * otherwise renders an enable toggle plus the OIDC configuration form.
 */
const InstanceOIDCAuthenticationPage = observer(() => {
  // state
  const [isSubmitting, setIsSubmitting] = useState<boolean>(false);
  // store
  const { fetchInstanceConfigurations, formattedConfig, updateInstanceConfigurations } = useInstance();
  // plane admin store
  const isOIDCEnabled = useInstanceFlag("OIDC_SAML_AUTH");
  // config: "1"/"0" string flag; "" until the configuration has loaded
  const enableOIDCConfig = formattedConfig?.IS_OIDC_ENABLED ?? "";

  useSWR("INSTANCE_CONFIGURATIONS", () => fetchInstanceConfigurations());

  const updateConfig = async (key: "IS_OIDC_ENABLED", value: string) => {
    setIsSubmitting(true);
    const payload = {
      [key]: value,
    };
    const updateConfigPromise = updateInstanceConfigurations(payload);
    setPromiseToast(updateConfigPromise, {
      loading: "Saving Configuration...",
      success: {
        title: "Configuration saved",
        // `value` is the string "0" or "1"; a bare truthiness check always
        // read "active" because the string "0" is truthy in JS. Compare
        // against "1" explicitly so the disable path reports correctly.
        message: () => `OIDC authentication is now ${value === "1" ? "active" : "disabled"}.`,
      },
      error: {
        title: "Error",
        message: () => "Failed to save configuration",
      },
    });
    await updateConfigPromise
      .then(() => {
        setIsSubmitting(false);
      })
      .catch((err) => {
        console.error(err);
        setIsSubmitting(false);
      });
  };

  // Feature flag explicitly off (undefined means still loading): show upsell.
  if (isOIDCEnabled === false) {
    return (
      <div className="relative container mx-auto w-full h-full p-4 py-4 my-6 space-y-6 flex flex-col">
        <PageHeader title="Authentication - God Mode" />
        <div className="text-center text-lg text-gray-500">
          <p>OpenID Connect (OIDC) authentication is not enabled for this instance.</p>
          <p>Activate any of your workspaces to get this feature.</p>
        </div>
      </div>
    );
  }

  return (
    <>
      <PageHeader title="Authentication - God Mode" />
      <div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
        <div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
          <AuthenticationMethodCard
            name="OIDC"
            description="Authenticate your users via the OpenID connect protocol."
            icon={<Image src={OIDCLogo} height={24} width={24} alt="OIDC Logo" />}
            config={
              <ToggleSwitch
                value={Boolean(parseInt(enableOIDCConfig))}
                onChange={() => {
                  // Flip the "1"/"0" flag (ternary-as-statement replaced with
                  // an explicit call for lint cleanliness).
                  const isCurrentlyEnabled = Boolean(parseInt(enableOIDCConfig));
                  updateConfig("IS_OIDC_ENABLED", isCurrentlyEnabled ? "0" : "1");
                }}
                size="sm"
                disabled={isSubmitting || !formattedConfig}
              />
            }
            disabled={isSubmitting || !formattedConfig}
            withBorder={false}
          />
        </div>
        <div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
          {formattedConfig ? (
            <InstanceOIDCConfigForm config={formattedConfig} />
          ) : (
            <Loader className="space-y-8">
              <Loader.Item height="50px" width="25%" />
              <Loader.Item height="50px" />
              <Loader.Item height="50px" />
              <Loader.Item height="50px" />
              <Loader.Item height="50px" />
              <Loader.Item height="50px" width="50%" />
            </Loader>
          )}
        </div>
      </div>
    </>
  );
});
export default InstanceOIDCAuthenticationPage;

View File

@@ -0,0 +1,245 @@
import { FC, useState } from "react";
import Link from "next/link";
import { Controller, useForm } from "react-hook-form";
// types
import { IFormattedInstanceConfiguration, TInstanceSAMLAuthenticationConfigurationKeys } from "@plane/types";
// ui
import { Button, TOAST_TYPE, TextArea, getButtonStyling, setToast } from "@plane/ui";
// components
import {
ConfirmDiscardModal,
ControllerInput,
TControllerInputFormField,
CopyField,
TCopyField,
CodeBlock,
} from "@/components/common";
// helpers
import { cn } from "@/helpers/common.helper";
// hooks
import { useInstance } from "@/hooks/store";
import { SAMLAttributeMappingTable } from "@/plane-admin/components/authentication";
type Props = {
config: IFormattedInstanceConfiguration;
};
type SAMLConfigFormValues = Record<TInstanceSAMLAuthenticationConfigurationKeys, string>;
/**
 * Admin form for configuring SAML-based authentication.
 *
 * Left column: IdP-provided values (entity ID, SSO/logout URLs, signing
 * certificate). Right column: Plane-generated URIs plus the attribute
 * mapping table the admin copies into the IdP. Saving persists via the
 * instance store and re-seeds the form with the server-confirmed values.
 */
export const InstanceSAMLConfigForm: FC<Props> = (props) => {
  const { config } = props;
  // states
  const [isDiscardChangesModalOpen, setIsDiscardChangesModalOpen] = useState(false);
  // store hooks
  const { updateInstanceConfigurations } = useInstance();
  // form data
  const {
    handleSubmit,
    control,
    reset,
    formState: { errors, isDirty, isSubmitting },
  } = useForm<SAMLConfigFormValues>({
    defaultValues: {
      SAML_ENTITY_ID: config["SAML_ENTITY_ID"],
      SAML_SSO_URL: config["SAML_SSO_URL"],
      SAML_LOGOUT_URL: config["SAML_LOGOUT_URL"],
      SAML_CERTIFICATE: config["SAML_CERTIFICATE"],
      SAML_PROVIDER_NAME: config["SAML_PROVIDER_NAME"],
    },
  });

  // Guarded for SSR: window is unavailable during server rendering.
  const originURL = typeof window !== "undefined" ? window.location.origin : "";

  // Input fields for the IdP-provided values, rendered in order.
  // (SAML_CERTIFICATE is rendered separately below as a TextArea.)
  const SAML_FORM_FIELDS: TControllerInputFormField[] = [
    {
      key: "SAML_ENTITY_ID",
      type: "text",
      label: "Entity ID",
      description: "A unique ID for this Plane app that you register on your IdP",
      placeholder: "70a44354520df8bd9bcd",
      error: Boolean(errors.SAML_ENTITY_ID),
      required: true,
    },
    {
      key: "SAML_SSO_URL",
      type: "text",
      label: "SSO URL",
      description: (
        <>
          The URL that brings up your IdP{"'"}s authentication screen when your users click the{" "}
          <CodeBlock>{"Continue with"}</CodeBlock> button
        </>
      ),
      placeholder: "https://example.com/sso",
      error: Boolean(errors.SAML_SSO_URL),
      required: true,
    },
    {
      key: "SAML_LOGOUT_URL",
      type: "text",
      label: "Logout URL",
      description: "Optional field that tells your IdP your users have logged out of this Plane app",
      placeholder: "https://example.com/logout",
      error: Boolean(errors.SAML_LOGOUT_URL),
      required: false,
    },
    {
      key: "SAML_PROVIDER_NAME",
      type: "text",
      label: "IdP's name",
      description: (
        <>
          Optional field for the name that your users see on the <CodeBlock>Continue with</CodeBlock> button
        </>
      ),
      placeholder: "Okta",
      error: Boolean(errors.SAML_PROVIDER_NAME),
      required: false,
    },
  ];

  // Plane-generated URIs the admin copies into the IdP's configuration.
  const SAML_SERVICE_DETAILS: TCopyField[] = [
    {
      key: "Metadata_Information",
      label: "Entity ID | Audience | Metadata information",
      url: `${originURL}/auth/saml/metadata/`,
      description:
        "We will generate this bit of the metadata that identifies this Plane app as an authorized service on your IdP.",
    },
    {
      key: "Callback_URI",
      label: "Callback URI",
      url: `${originURL}/auth/saml/callback/`,
      description: (
        <>
          We will generate this <CodeBlock darkerShade>http-post request</CodeBlock> URL that you should paste into
          your <CodeBlock darkerShade>ACS URL</CodeBlock> or <CodeBlock darkerShade>Sign-in call back URL</CodeBlock>{" "}
          field on your IdP.
        </>
      ),
    },
    {
      key: "Logout_URI",
      label: "Logout URI",
      url: `${originURL}/auth/saml/logout/`,
      description: (
        <>
          We will generate this <CodeBlock darkerShade>http-redirect request</CodeBlock> URL that you should paste
          into your <CodeBlock darkerShade>SLS URL</CodeBlock> or <CodeBlock darkerShade>Logout URL</CodeBlock> field
          on your IdP.
        </>
      ),
    },
  ];

  // Persist the form, then reset it with the server-confirmed values so the
  // dirty state clears and the UI reflects what was actually stored.
  const onSubmit = async (formData: SAMLConfigFormValues) => {
    const payload: Partial<SAMLConfigFormValues> = { ...formData };
    await updateInstanceConfigurations(payload)
      .then((response = []) => {
        setToast({
          type: TOAST_TYPE.SUCCESS,
          title: "Done!",
          message: "Your SAML-based authentication is configured. You should test it now.",
        });
        reset({
          SAML_ENTITY_ID: response.find((item) => item.key === "SAML_ENTITY_ID")?.value,
          SAML_SSO_URL: response.find((item) => item.key === "SAML_SSO_URL")?.value,
          SAML_LOGOUT_URL: response.find((item) => item.key === "SAML_LOGOUT_URL")?.value,
          SAML_CERTIFICATE: response.find((item) => item.key === "SAML_CERTIFICATE")?.value,
          SAML_PROVIDER_NAME: response.find((item) => item.key === "SAML_PROVIDER_NAME")?.value,
        });
      })
      .catch((err) => console.error(err));
  };

  // Intercept "Go back" while the form has unsaved edits and ask to confirm.
  const handleGoBack = (e: React.MouseEvent<HTMLAnchorElement, MouseEvent>) => {
    if (isDirty) {
      e.preventDefault();
      setIsDiscardChangesModalOpen(true);
    }
  };

  return (
    <>
      <ConfirmDiscardModal
        isOpen={isDiscardChangesModalOpen}
        onDiscardHref="/authentication"
        handleClose={() => setIsDiscardChangesModalOpen(false)}
      />
      <div className="flex flex-col gap-8">
        <div className="grid grid-cols-2 gap-x-12 gap-y-8 w-full">
          <div className="flex flex-col gap-y-4 col-span-2 md:col-span-1 pt-1">
            <div className="pt-2.5 text-xl font-medium">IdP-provided details for Plane</div>
            {SAML_FORM_FIELDS.map((field) => (
              <ControllerInput
                key={field.key}
                control={control}
                type={field.type}
                name={field.key}
                label={field.label}
                description={field.description}
                placeholder={field.placeholder}
                error={field.error}
                required={field.required}
              />
            ))}
            <div className="flex flex-col gap-1">
              <h4 className="text-sm">SAML certificate</h4>
              <Controller
                control={control}
                name="SAML_CERTIFICATE"
                rules={{ required: "Certificate is required." }}
                render={({ field: { value, onChange } }) => (
                  <TextArea
                    id="SAML_CERTIFICATE"
                    name="SAML_CERTIFICATE"
                    value={value}
                    onChange={onChange}
                    hasError={Boolean(errors.SAML_CERTIFICATE)}
                    // A JS string expression is required here: JSX attribute
                    // string literals do not process escapes, so the previous
                    // placeholder displayed literal "\n" characters instead
                    // of line breaks.
                    placeholder={"---BEGIN CERTIFICATE---\n2yWn1gc7DhOFB9\nr0gbE+\n---END CERTIFICATE---"}
                    className="min-h-[102px] w-full rounded-md font-medium text-sm"
                  />
                )}
              />
              <p className="pt-0.5 text-xs text-custom-text-300">
                IdP-generated certificate for signing this Plane app as an authorized service provider for your IdP
              </p>
            </div>
            <div className="flex flex-col gap-1 pt-4">
              <div className="flex items-center gap-4">
                <Button variant="primary" onClick={handleSubmit(onSubmit)} loading={isSubmitting} disabled={!isDirty}>
                  {isSubmitting ? "Saving..." : "Save changes"}
                </Button>
                <Link
                  href="/authentication"
                  className={cn(getButtonStyling("link-neutral", "md"), "font-medium")}
                  onClick={handleGoBack}
                >
                  Go back
                </Link>
              </div>
            </div>
          </div>
          <div className="col-span-2 md:col-span-1">
            <div className="flex flex-col gap-y-4 px-6 pt-1.5 pb-4 bg-custom-background-80/60 rounded-lg">
              <div className="pt-2 text-xl font-medium">Plane-provided details for your IdP</div>
              {SAML_SERVICE_DETAILS.map((field) => (
                <CopyField key={field.key} label={field.label} url={field.url} description={field.description} />
              ))}
              <div className="flex flex-col gap-1">
                <h4 className="text-sm text-custom-text-200 font-medium">Mapping</h4>
                <SAMLAttributeMappingTable />
              </div>
            </div>
          </div>
        </div>
      </div>
    </>
  );
};

View File

@@ -0,0 +1,120 @@
"use client";
import { useState } from "react";
import { observer } from "mobx-react-lite";
import Image from "next/image";
import useSWR from "swr";
// ui
import { Loader, ToggleSwitch, setPromiseToast } from "@plane/ui";
// components
import { AuthenticationMethodCard } from "@/components/authentication";
import { PageHeader } from "@/components/common";
// hooks
import { useInstance } from "@/hooks/store";
// icons
import SAMLLogo from "/public/logos/saml-logo.svg";
// plane admin hooks
import { useInstanceFlag } from "@/plane-admin/hooks/store/use-instance-flag";
// local components
import { InstanceSAMLConfigForm } from "./form";
/**
 * God Mode page for enabling and configuring SAML authentication.
 *
 * Shows an upsell notice when the OIDC_SAML_AUTH instance flag is off;
 * otherwise renders an enable toggle plus the SAML configuration form.
 */
const InstanceSAMLAuthenticationPage = observer(() => {
  // state
  const [isSubmitting, setIsSubmitting] = useState<boolean>(false);
  // store
  const { fetchInstanceConfigurations, formattedConfig, updateInstanceConfigurations } = useInstance();
  // plane admin store
  const isSAMLEnabled = useInstanceFlag("OIDC_SAML_AUTH");
  // config: "1"/"0" string flag; "" until the configuration has loaded
  const enableSAMLConfig = formattedConfig?.IS_SAML_ENABLED ?? "";

  useSWR("INSTANCE_CONFIGURATIONS", () => fetchInstanceConfigurations());

  const updateConfig = async (key: "IS_SAML_ENABLED", value: string) => {
    setIsSubmitting(true);
    const payload = {
      [key]: value,
    };
    const updateConfigPromise = updateInstanceConfigurations(payload);
    setPromiseToast(updateConfigPromise, {
      loading: "Saving Configuration...",
      success: {
        title: "Configuration saved",
        // `value` is the string "0" or "1"; a bare truthiness check always
        // read "active" because the string "0" is truthy in JS. Compare
        // against "1" explicitly so the disable path reports correctly.
        message: () => `SAML authentication is now ${value === "1" ? "active" : "disabled"}.`,
      },
      error: {
        title: "Error",
        message: () => "Failed to save configuration",
      },
    });
    await updateConfigPromise
      .then(() => {
        setIsSubmitting(false);
      })
      .catch((err) => {
        console.error(err);
        setIsSubmitting(false);
      });
  };

  // Feature flag explicitly off (undefined means still loading): show upsell.
  if (isSAMLEnabled === false) {
    return (
      <div className="relative container mx-auto w-full h-full p-4 py-4 my-6 space-y-6 flex flex-col">
        <PageHeader title="Authentication - God Mode" />
        <div className="text-center text-lg text-gray-500">
          <p>Security Assertion Markup Language (SAML) authentication is not enabled for this instance.</p>
          <p>Activate any of your workspaces to get this feature.</p>
        </div>
      </div>
    );
  }

  return (
    <>
      <PageHeader title="Authentication - God Mode" />
      <div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
        <div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
          <AuthenticationMethodCard
            name="SAML"
            description="Authenticate your users via Security Assertion Markup Language
            protocol."
            icon={<Image src={SAMLLogo} height={24} width={24} alt="SAML Logo" className="pl-0.5" />}
            config={
              <ToggleSwitch
                value={Boolean(parseInt(enableSAMLConfig))}
                onChange={() => {
                  // Flip the "1"/"0" flag (ternary-as-statement replaced with
                  // an explicit call for lint cleanliness).
                  const isCurrentlyEnabled = Boolean(parseInt(enableSAMLConfig));
                  updateConfig("IS_SAML_ENABLED", isCurrentlyEnabled ? "0" : "1");
                }}
                size="sm"
                disabled={isSubmitting || !formattedConfig}
              />
            }
            disabled={isSubmitting || !formattedConfig}
            withBorder={false}
          />
        </div>
        <div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
          {formattedConfig ? (
            <InstanceSAMLConfigForm config={formattedConfig} />
          ) : (
            <Loader className="space-y-8">
              <Loader.Item height="50px" width="25%" />
              <Loader.Item height="50px" />
              <Loader.Item height="50px" />
              <Loader.Item height="50px" />
              <Loader.Item height="50px" width="50%" />
            </Loader>
          )}
        </div>
      </div>
    </>
  );
});
export default InstanceSAMLAuthenticationPage;

View File

@@ -2,6 +2,7 @@
import { FC, ReactNode, useEffect } from "react";
import { observer } from "mobx-react";
import { useRouter } from "next/navigation";
import useSWR from "swr";
// components
import { InstanceSidebar } from "@/components/admin-sidebar";
import { InstanceHeader } from "@/components/auth-header";
@@ -9,6 +10,8 @@ import { LogoSpinner } from "@/components/common";
import { NewUserPopup } from "@/components/new-user-popup";
// hooks
import { useUser } from "@/hooks/store";
// plane admin hooks
import { useInstanceFeatureFlags } from "@/plane-admin/hooks/store/use-instance-feature-flag";
type TAdminLayout = {
children: ReactNode;
@@ -20,6 +23,14 @@ export const AdminLayout: FC<TAdminLayout> = observer((props) => {
const router = useRouter();
// store hooks
const { isUserLoggedIn } = useUser();
// plane admin hooks
const { fetchInstanceFeatureFlags } = useInstanceFeatureFlags();
// fetching instance feature flags
const { isLoading: flagsLoader, error: flagsError } = useSWR(
`INSTANCE_FEATURE_FLAGS`,
() => fetchInstanceFeatureFlags(),
{ revalidateOnFocus: false, revalidateIfStale: false, errorRetryCount: 1 }
);
useEffect(() => {
if (isUserLoggedIn === false) {
@@ -27,7 +38,7 @@ export const AdminLayout: FC<TAdminLayout> = observer((props) => {
}
}, [router, isUserLoggedIn]);
if (isUserLoggedIn === undefined) {
if ((flagsLoader && !flagsError) || isUserLoggedIn === undefined) {
return (
<div className="relative flex h-screen w-full items-center justify-center">
<LogoSpinner />

View File

@@ -1 +1,85 @@
export * from "ce/components/authentication/authentication-modes";
import { observer } from "mobx-react";
import Image from "next/image";
import { useTheme } from "next-themes";
import {
TInstanceAuthenticationMethodKeys as TBaseAuthenticationMethods,
TInstanceAuthenticationModes,
TInstanceEnterpriseAuthenticationMethodKeys,
} from "@plane/types";
import { getAuthenticationModes as getCEAuthenticationModes } from "@/ce/components/authentication/authentication-modes";
// types
// components
import { AuthenticationMethodCard } from "@/components/authentication";
// helpers
import { getBaseAuthenticationModes } from "@/helpers/authentication.helper";
// plane admin components
import { OIDCConfiguration, SAMLConfiguration } from "@/plane-admin/components/authentication";
// images
import { useInstanceFlag } from "@/plane-admin/hooks/store/use-instance-flag";
import OIDCLogo from "@/public/logos/oidc-logo.svg";
import SAMLLogo from "@/public/logos/saml-logo.svg";
// plane admin hooks
type TInstanceAuthenticationMethodKeys = TBaseAuthenticationMethods | TInstanceEnterpriseAuthenticationMethodKeys;
export type TAuthenticationModeProps = {
disabled: boolean;
updateConfig: (key: TInstanceAuthenticationMethodKeys, value: string) => void;
};
export type TGetAuthenticationModeProps = {
disabled: boolean;
updateConfig: (key: TInstanceAuthenticationMethodKeys, value: string) => void;
resolvedTheme: string | undefined;
};
// Enterprise authentication methods
// Builds the full list of authentication modes shown in God Mode: the
// base/CE modes first, followed by the enterprise-only OIDC and SAML entries.
// Each entry supplies the card metadata plus its configuration control.
export const getAuthenticationModes: (props: TGetAuthenticationModeProps) => TInstanceAuthenticationModes[] = ({
  disabled,
  updateConfig,
  resolvedTheme,
}) => [
  // CE/base modes are shared with the community edition helper.
  ...getBaseAuthenticationModes({ disabled, updateConfig, resolvedTheme }),
  {
    key: "oidc",
    name: "OIDC",
    description: "Authenticate your users via the OpenID Connect protocol.",
    icon: <Image src={OIDCLogo} height={22} width={22} alt="OIDC Logo" />,
    config: <OIDCConfiguration disabled={disabled} updateConfig={updateConfig} />,
  },
  {
    key: "saml",
    name: "SAML",
    description: "Authenticate your users via the Security Assertion Markup Language protocol.",
    icon: <Image src={SAMLLogo} height={22} width={22} alt="SAML Logo" className="pl-0.5" />,
    config: <SAMLConfiguration disabled={disabled} updateConfig={updateConfig} />,
  },
];
// Renders one card per available authentication mode. Falls back to the CE
// mode list when the "OIDC_SAML_AUTH" instance feature flag is disabled.
export const AuthenticationModes: React.FC<TAuthenticationModeProps> = observer((props) => {
  const { disabled, updateConfig } = props;
  // next-themes
  const { resolvedTheme } = useTheme();
  // plane admin hooks
  const isOIDCSAMLEnabled = useInstanceFlag("OIDC_SAML_AUTH");
  // enterprise list includes OIDC/SAML; CE list does not
  const authenticationModes = isOIDCSAMLEnabled
    ? getAuthenticationModes({ disabled, updateConfig, resolvedTheme })
    : getCEAuthenticationModes({ disabled, updateConfig, resolvedTheme });
  return (
    <>
      {authenticationModes.map((method) => (
        <AuthenticationMethodCard
          key={method.key}
          name={method.name}
          description={method.description}
          icon={method.icon}
          config={method.config}
          disabled={disabled}
          unavailable={method.unavailable}
        />
      ))}
    </>
  );
});

View File

@@ -1 +1,4 @@
export * from "./authentication-modes";
export * from "./oidc-config";
export * from "./saml-config";
export * from "./saml-attribute-mapping-table";

View File

@@ -0,0 +1,72 @@
"use client";
import React from "react";
import { observer } from "mobx-react-lite";
import Link from "next/link";
// icons
import { Settings2 } from "lucide-react";
// types
import { TInstanceEnterpriseAuthenticationMethodKeys } from "@plane/types";
// ui
import { ToggleSwitch, getButtonStyling } from "@plane/ui";
// helpers
import { cn } from "@/helpers/common.helper";
// hooks
import { useInstance } from "@/hooks/store";
type Props = {
disabled: boolean;
updateConfig: (
key: TInstanceEnterpriseAuthenticationMethodKeys,
value: string
) => void;
};
/**
 * OIDC configuration card body: shows an Edit link plus an enable/disable
 * toggle once a client id and secret are configured, otherwise a Configure
 * link to the setup page.
 */
export const OIDCConfiguration: React.FC<Props> = observer((props) => {
  const { disabled, updateConfig } = props;
  // store
  const { formattedConfig } = useInstance();
  // derived values
  const enableOIDCConfig = formattedConfig?.IS_OIDC_ENABLED ?? "";
  // "1" means enabled; the "" fallback parses to NaN, which coerces to false
  const isOIDCEnabled = Boolean(parseInt(enableOIDCConfig));
  // both client id and secret must be present before the toggle is offered
  const isOIDCConfigured =
    !!formattedConfig?.OIDC_CLIENT_ID && !!formattedConfig?.OIDC_CLIENT_SECRET;
  return (
    <>
      {isOIDCConfigured ? (
        <div className="flex items-center gap-4">
          <Link
            href="/authentication/oidc"
            className={cn(
              getButtonStyling("link-primary", "md"),
              "font-medium"
            )}
          >
            Edit
          </Link>
          <ToggleSwitch
            value={isOIDCEnabled}
            onChange={() => updateConfig("IS_OIDC_ENABLED", isOIDCEnabled ? "0" : "1")}
            size="sm"
            disabled={disabled}
          />
        </div>
      ) : (
        <Link
          href="/authentication/oidc"
          className={cn(
            getButtonStyling("neutral-primary", "sm"),
            "text-custom-text-300"
          )}
        >
          <Settings2 className="h-4 w-4 p-0.5 text-custom-text-300/80" />
          Configure
        </Link>
      )}
    </>
  );
});

View File

@@ -0,0 +1,28 @@
export const SAMLAttributeMappingTable = () => (
<table className="table-auto border-collapse text-custom-text-200 text-sm">
<thead>
<tr className="text-left">
<th className="border-b border-r border-custom-border-300 px-4 py-1.5">IdP</th>
<th className="border-b border-custom-border-300 px-4 py-1.5">Plane</th>
</tr>
</thead>
<tbody>
<tr>
<td className="border-t border-r border-custom-border-300 px-4 py-1.5">Name ID format</td>
<td className="border-t border-custom-border-300 px-4 py-1.5">emailAddress</td>
</tr>
<tr>
<td className="border-t border-r border-custom-border-300 px-4 py-1.5">first_name</td>
<td className="border-t border-custom-border-300 px-4 py-1.5">user.firstName</td>
</tr>
<tr>
<td className="border-t border-r border-custom-border-300 px-4 py-1.5">last_name</td>
<td className="border-t border-custom-border-300 px-4 py-1.5">user.lastName</td>
</tr>
<tr>
<td className="border-t border-r border-custom-border-300 px-4 py-1.5">email</td>
<td className="border-t border-custom-border-300 px-4 py-1.5">user.email</td>
</tr>
</tbody>
</table>
);

View File

@@ -0,0 +1,72 @@
"use client";
import React from "react";
import { observer } from "mobx-react-lite";
import Link from "next/link";
// icons
import { Settings2 } from "lucide-react";
// types
import { TInstanceEnterpriseAuthenticationMethodKeys } from "@plane/types";
// ui
import { ToggleSwitch, getButtonStyling } from "@plane/ui";
// helpers
import { cn } from "@/helpers/common.helper";
// hooks
import { useInstance } from "@/hooks/store";
type Props = {
disabled: boolean;
updateConfig: (
key: TInstanceEnterpriseAuthenticationMethodKeys,
value: string
) => void;
};
/**
 * SAML configuration card body: shows an Edit link plus an enable/disable
 * toggle once an entity id and certificate are configured, otherwise a
 * Configure link to the setup page.
 */
export const SAMLConfiguration: React.FC<Props> = observer((props) => {
  const { disabled, updateConfig } = props;
  // store
  const { formattedConfig } = useInstance();
  // derived values
  const enableSAMLConfig = formattedConfig?.IS_SAML_ENABLED ?? "";
  // "1" means enabled; the "" fallback parses to NaN, which coerces to false
  const isSAMLEnabled = Boolean(parseInt(enableSAMLConfig));
  // both entity id and certificate must be present before the toggle is offered
  const isSAMLConfigured =
    !!formattedConfig?.SAML_ENTITY_ID && !!formattedConfig?.SAML_CERTIFICATE;
  return (
    <>
      {isSAMLConfigured ? (
        <div className="flex items-center gap-4">
          <Link
            href="/authentication/saml"
            className={cn(
              getButtonStyling("link-primary", "md"),
              "font-medium"
            )}
          >
            Edit
          </Link>
          <ToggleSwitch
            value={isSAMLEnabled}
            onChange={() => updateConfig("IS_SAML_ENABLED", isSAMLEnabled ? "0" : "1")}
            size="sm"
            disabled={disabled}
          />
        </div>
      ) : (
        <Link
          href="/authentication/saml"
          className={cn(
            getButtonStyling("neutral-primary", "sm"),
            "text-custom-text-300"
          )}
        >
          <Settings2 className="h-4 w-4 p-0.5 text-custom-text-300/80" />
          Configure
        </Link>
      )}
    </>
  );
});

View File

@@ -1 +1 @@
export * from "ce/components/common";
export * from "./upgrade-button";

View File

@@ -0,0 +1,39 @@
"use client";
import React from "react";
// ui
import { AlertModalCore, Button } from "@plane/ui";
// helpers
import { WEB_BASE_URL } from "@/helpers/common.helper";
// Button shown on EE-gated features for inactive workspaces. Clicking it opens
// a confirmation modal; confirming opens the main web app in a new tab.
export const UpgradeButton: React.FC = () => {
  // states
  const [isActivationModalOpen, setIsActivationModalOpen] = React.useState(false);
  // derived values
  const redirectionLink = encodeURI(WEB_BASE_URL + "/");
  return (
    <>
      <AlertModalCore
        variant="primary"
        isOpen={isActivationModalOpen}
        handleClose={() => setIsActivationModalOpen(false)}
        handleSubmit={() => {
          // open the web app in a new tab, then dismiss the modal
          window.open(redirectionLink, "_blank");
          setIsActivationModalOpen(false);
        }}
        isSubmitting={false}
        title="Activate workspace"
        content="Activate any of your workspace to get this feature."
        primaryButtonText={{
          loading: "Redirecting...",
          default: "Go to Plane",
        }}
        secondaryButtonText="Close"
      />
      <Button variant="primary" size="sm" onClick={() => setIsActivationModalOpen(true)}>
        Activate workspace
      </Button>
    </>
  );
};

View File

@@ -0,0 +1,11 @@
import { useContext } from "react";
// context
import { StoreContext } from "@/lib/store-provider";
// plane admin stores
import { IInstanceFeatureFlagsStore } from "@/plane-admin/store/instance-feature-flags.store";
export const useInstanceFeatureFlags = (): IInstanceFeatureFlagsStore => {
const context = useContext(StoreContext);
if (context === undefined) throw new Error("useInstanceFeatureFlags must be used within StoreProvider");
return context.instanceFeatureFlags;
};

View File

@@ -0,0 +1,13 @@
import { useContext } from "react";
// context
import { StoreContext } from "@/lib/store-provider";
// Feature flags known to the admin app; values must match the server-side flag names.
export enum E_FEATURE_FLAGS {
  OIDC_SAML_AUTH = "OIDC_SAML_AUTH",
}
// Returns the boolean value of a single instance feature flag, falling back to
// `defaultValue` when the flag is absent from the store. Must be called inside
// a StoreProvider.
export const useInstanceFlag = (flag: keyof typeof E_FEATURE_FLAGS, defaultValue: boolean = false): boolean => {
  const context = useContext(StoreContext);
  if (context === undefined) throw new Error("useInstanceFlag must be used within StoreProvider");
  return context.instanceFeatureFlags.flags?.[E_FEATURE_FLAGS[flag]] ?? defaultValue;
};

View File

@@ -0,0 +1,21 @@
import { API_BASE_URL } from "@/helpers/common.helper";
// services
import { APIService } from "@/services/api.service";
// Map of feature-flag name -> enabled, as returned by the admin API.
export type TInstanceFeatureFlagsResponse = {
  [featureFlag: string]: boolean;
};
/** Thin API client for the instance-level feature-flag endpoint. */
export class InstanceFeatureFlagService extends APIService {
  constructor() {
    super(API_BASE_URL);
  }
  /**
   * Fetch all instance feature flags.
   * Rejections propagate to the caller unchanged (the previous
   * `.catch((e) => { throw e; })` was a no-op and has been removed).
   */
  async getInstanceFeatureFlags(): Promise<TInstanceFeatureFlagsResponse> {
    return this.get<TInstanceFeatureFlagsResponse>("/api/instances/admins/feature-flags/").then(
      (response) => response.data
    );
  }
}

View File

@@ -0,0 +1,50 @@
import { set } from "lodash";
import { action, makeObservable, observable, runInAction } from "mobx";
// services
import {
InstanceFeatureFlagService,
TInstanceFeatureFlagsResponse,
} from "@/plane-admin/services/instance-feature-flag.service";
// Module-level singleton; the service is stateless so one instance suffices.
const instanceFeatureFlagService = new InstanceFeatureFlagService();
type TFeatureFlagsMaps = Record<string, boolean>; // feature flag -> boolean
export interface IInstanceFeatureFlagsStore {
  flags: TFeatureFlagsMaps;
  // actions
  hydrate: (data: any) => void;
  fetchInstanceFeatureFlags: () => Promise<TInstanceFeatureFlagsResponse>;
}
/** MobX store holding the instance-level feature-flag map. */
export class InstanceFeatureFlagsStore implements IInstanceFeatureFlagsStore {
  flags: TFeatureFlagsMaps = {};
  constructor() {
    makeObservable(this, {
      flags: observable,
      // `hydrate` mutates observable state, so it must be registered as an
      // action too; previously only `fetchInstanceFeatureFlags` was annotated,
      // which triggers MobX strict-mode warnings on hydration.
      hydrate: action,
      fetchInstanceFeatureFlags: action,
    });
  }
  /** Seed flags from server-provided initial data; no-op when data is falsy. */
  hydrate = (data: any) => {
    if (data) this.flags = data;
  };
  /**
   * Fetch flags from the API and merge them into the observable map.
   * Errors are logged and re-thrown so callers can react to the failure.
   */
  fetchInstanceFeatureFlags = async () => {
    try {
      const response = await instanceFeatureFlagService.getInstanceFeatureFlags();
      runInAction(() => {
        if (response) {
          Object.keys(response).forEach((key) => {
            set(this.flags, key, response[key]);
          });
        }
      });
      return response;
    } catch (error) {
      console.error("Error fetching instance feature flags", error);
      throw error;
    }
  };
}

View File

@@ -1 +1,29 @@
export * from "ce/store/root.store";
import { enableStaticRendering } from "mobx-react";
// stores
import {
IInstanceFeatureFlagsStore,
InstanceFeatureFlagsStore,
} from "@/plane-admin/store/instance-feature-flags.store";
import { CoreRootStore } from "@/store/root.store";
// plane admin store
enableStaticRendering(typeof window === "undefined");
/** EE root store: the core store plus the instance feature-flags sub-store. */
export class RootStore extends CoreRootStore {
  instanceFeatureFlags: IInstanceFeatureFlagsStore;
  constructor() {
    super();
    this.instanceFeatureFlags = new InstanceFeatureFlagsStore();
  }
  /**
   * Hydrate the core store and the feature-flags sub-store from server data.
   * Guards against a missing payload so SSR without initial data cannot crash
   * on `initialData.instanceFeatureFlags`.
   */
  hydrate(initialData: any) {
    super.hydrate(initialData);
    this.instanceFeatureFlags.hydrate(initialData?.instanceFeatureFlags);
  }
  /** Drop all flag state on sign-out by replacing the sub-store wholesale. */
  resetOnSignOut() {
    super.resetOnSignOut();
    this.instanceFeatureFlags = new InstanceFeatureFlagsStore();
  }
}

View File

@@ -7,7 +7,8 @@
"@/*": ["core/*"],
"@/helpers/*": ["helpers/*"],
"@/public/*": ["public/*"],
"@/plane-admin/*": ["ce/*"]
"@/plane-admin/*": ["ee/*"],
"@/ce/*": ["ce/*"]
}
},
"include": ["next-env.d.ts", "next.config.js", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],

View File

@@ -56,6 +56,7 @@ GUNICORN_WORKERS=2
ADMIN_BASE_URL=
SPACE_BASE_URL=
APP_BASE_URL=
SILO_BASE_URL=
# Hard delete files after days
HARD_DELETE_AFTER_DAYS=60

View File

@@ -3,7 +3,8 @@ FROM python:3.12.5-alpine AS backend
# set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
ENV INSTANCE_CHANGELOG_URL https://api.plane.so/api/public/anchor/c210fcf7b0ff439490b1cd606b4bb92b/pages/
WORKDIR /code

View File

@@ -4,6 +4,7 @@ FROM python:3.12.5-alpine AS backend
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
ENV INSTANCE_CHANGELOG_URL https://api.plane.so/api/public/anchor/c210fcf7b0ff439490b1cd606b4bb92b/pages/
RUN apk --no-cache add \
"bash~=5.2" \
@@ -20,7 +21,8 @@ RUN apk --no-cache add \
"make" \
"postgresql-dev" \
"libc-dev" \
"linux-headers"
"linux-headers" \
"xmlsec-dev"
WORKDIR /code

1
apiserver/README.md Normal file
View File

@@ -0,0 +1 @@
# API SERVER

View File

@@ -0,0 +1,18 @@
#!/bin/bash
# API server entrypoint: wait for dependencies, prime caches, optionally
# register the instance, then exec gunicorn (replacing this shell as PID 1).
set -e
export SKIP_ENV_VAR=0
# Block until the database accepts connections
python manage.py wait_for_db
# Wait for migrations
python manage.py wait_for_migrations
# Clear Cache before starting to remove stale values
python manage.py clear_cache
# Register instance if INSTANCE_ADMIN_EMAIL is set
if [ -n "$INSTANCE_ADMIN_EMAIL" ]; then
    # Quote the variable so the email is passed as a single argument
    # (unquoted, it would be subject to word splitting and globbing).
    python manage.py setup_instance "$INSTANCE_ADMIN_EMAIL"
fi
exec gunicorn -w "$GUNICORN_WORKERS" -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:"${PORT:-8000}" --max-requests 1200 --max-requests-jitter 1000 --access-logfile -

View File

@@ -0,0 +1,39 @@
#!/bin/bash
# EE API server entrypoint: wait for dependencies, derive a machine signature,
# register the instance, prepare storage/caches, then exec gunicorn.
set -e
python manage.py wait_for_db
# Wait for migrations
python manage.py wait_for_migrations
# Collect system information used to derive a stable machine signature.
# (A stray second "#!/bin/bash" line and a misplaced "Create the default
# bucket" comment — merge-conflict residue — have been removed here.)
HOSTNAME=$(hostname)
MAC_ADDRESS=$(ip link show | awk '/ether/ {print $2}' | head -n 1)
CPU_INFO=$(cat /proc/cpuinfo)
MEMORY_INFO=$(free -h)
DISK_INFO=$(df -h)
# Concatenate information and compute SHA-256 hash
SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256sum | awk '{print $1}')
# Allow an externally supplied MACHINE_SIGNATURE to override the derived one
MACHINE_SIGNATURE=${MACHINE_SIGNATURE:-$SIGNATURE}
export SKIP_ENV_VAR=1
# Register instance
python manage.py register_instance_ee "$MACHINE_SIGNATURE"
# Load the configuration variable
python manage.py configure_instance
# Create the default bucket
python manage.py create_bucket
# Clear Cache before starting to remove stale values
python manage.py clear_cache
# Clear workspace licenses
python manage.py clear_workspace_licenses
exec gunicorn -w "$GUNICORN_WORKERS" -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:"${PORT:-8000}" --max-requests 1200 --max-requests-jitter 1000 --access-logfile -

View File

@@ -32,4 +32,4 @@ python manage.py create_bucket
# Clear Cache before starting to remove stale values
python manage.py clear_cache
exec gunicorn -w "$GUNICORN_WORKERS" -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:"${PORT:-8000}" --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
exec gunicorn -w "$GUNICORN_WORKERS" -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:"${PORT:-8000}" --max-requests 1200 --max-requests-jitter 1000 --access-logfile -

View File

@@ -19,6 +19,7 @@ from plane.app.permissions import ProjectLitePermission
from plane.bgtasks.issue_activities_task import issue_activity
from plane.db.models import (
Inbox,
IssueType,
InboxIssue,
Issue,
Project,
@@ -145,6 +146,11 @@ class InboxIssueAPIEndpoint(BaseAPIView):
is_triage=True,
)
# Get the issue type
issue_type = IssueType.objects.filter(
project_issue_types__project_id=project_id, is_default=True
).first()
# create an issue
issue = Issue.objects.create(
name=request.data.get("issue", {}).get("name"),
@@ -155,6 +161,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):
priority=request.data.get("issue", {}).get("priority", "none"),
project_id=project_id,
state=state,
type=issue_type,
)
# create an inbox issue

View File

@@ -0,0 +1,8 @@
from rest_framework.authentication import SessionAuthentication
class BaseSessionAuthentication(SessionAuthentication):
    """Session authentication that skips CSRF enforcement for the REST APIs."""

    # Disable csrf for the rest apis
    def enforce_csrf(self, request):
        # Returning None instead of raising marks every request as CSRF-exempt.
        return

View File

@@ -30,7 +30,6 @@ from .project import (
ProjectIdentifierSerializer,
ProjectLiteSerializer,
ProjectMemberLiteSerializer,
DeployBoardSerializer,
ProjectMemberAdminSerializer,
ProjectPublicMemberSerializer,
ProjectMemberRoleSerializer,
@@ -39,11 +38,13 @@ from .state import StateSerializer, StateLiteSerializer
from .view import (
IssueViewSerializer,
)
from .cycle import (
CycleSerializer,
CycleIssueSerializer,
CycleWriteSerializer,
CycleUserPropertiesSerializer,
EntityProgressSerializer,
)
from .asset import FileAssetSerializer
from .issue import (
@@ -130,3 +131,14 @@ from .draft import (
DraftIssueSerializer,
DraftIssueDetailSerializer,
)
from .integration import (
IntegrationSerializer,
WorkspaceIntegrationSerializer,
GithubIssueSyncSerializer,
GithubRepositorySerializer,
GithubRepositorySyncSerializer,
GithubCommentSyncSerializer,
SlackProjectSyncSerializer,
)
from .deploy_board import DeployBoardSerializer

View File

@@ -3,12 +3,14 @@ from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from .issue import IssueStateSerializer
from plane.db.models import (
Cycle,
CycleIssue,
CycleUserProperties,
)
from plane.ee.models import EntityProgress
class CycleWriteSerializer(BaseSerializer):
@@ -93,6 +95,7 @@ class CycleIssueSerializer(BaseSerializer):
"cycle",
]
class CycleUserPropertiesSerializer(BaseSerializer):
class Meta:
model = CycleUserProperties
@@ -102,3 +105,9 @@ class CycleUserPropertiesSerializer(BaseSerializer):
"project",
"cycle" "user",
]
class EntityProgressSerializer(BaseSerializer):
class Meta:
model = EntityProgress
fields = "__all__"

View File

@@ -0,0 +1,21 @@
# Module imports
from .base import BaseSerializer
from plane.app.serializers.project import ProjectLiteSerializer
from plane.app.serializers.workspace import WorkspaceLiteSerializer
from plane.db.models import DeployBoard
class DeployBoardSerializer(BaseSerializer):
    """Serializer for DeployBoard with nested project and workspace summaries."""

    # read-only nested summaries resolved from the related objects
    project_details = ProjectLiteSerializer(read_only=True, source="project")
    workspace_detail = WorkspaceLiteSerializer(
        read_only=True, source="workspace"
    )
    class Meta:
        model = DeployBoard
        fields = "__all__"
        # anchor is server-generated; workspace/project come from the URL context
        read_only_fields = [
            "workspace",
            "project",
            "anchor",
        ]

View File

@@ -12,6 +12,7 @@ from plane.db.models import (
Label,
State,
DraftIssue,
IssueType,
DraftIssueAssignee,
DraftIssueLabel,
DraftIssueCycle,
@@ -33,6 +34,12 @@ class DraftIssueCreateSerializer(BaseSerializer):
required=False,
allow_null=True,
)
type_id = serializers.PrimaryKeyRelatedField(
source="type",
queryset=IssueType.objects.all(),
required=False,
allow_null=True,
)
label_ids = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
write_only=True,
@@ -84,11 +91,21 @@ class DraftIssueCreateSerializer(BaseSerializer):
workspace_id = self.context["workspace_id"]
project_id = self.context["project_id"]
issue_type = validated_data.pop("type", None)
if not issue_type:
# Get default issue type
issue_type = IssueType.objects.filter(
project_issue_types__project_id=project_id, is_default=True
).first()
issue_type = issue_type
# Create Issue
issue = DraftIssue.objects.create(
**validated_data,
workspace_id=workspace_id,
project_id=project_id,
type=issue_type,
)
# Issue Audit Users

View File

@@ -0,0 +1,8 @@
from .base import IntegrationSerializer, WorkspaceIntegrationSerializer
from .github import (
GithubRepositorySerializer,
GithubRepositorySyncSerializer,
GithubIssueSyncSerializer,
GithubCommentSyncSerializer,
)
from .slack import SlackProjectSyncSerializer

View File

@@ -0,0 +1,22 @@
# Module imports
from plane.app.serializers import BaseSerializer
from plane.db.models import Integration, WorkspaceIntegration
class IntegrationSerializer(BaseSerializer):
    """Serializer for an available Integration definition."""

    class Meta:
        model = Integration
        fields = "__all__"
        # verification status is managed server-side, not by API clients
        read_only_fields = [
            "verified",
        ]
class WorkspaceIntegrationSerializer(BaseSerializer):
    """Serializer for an integration installed on a workspace."""

    # read-only nested copy of the underlying integration definition
    integration_detail = IntegrationSerializer(
        read_only=True, source="integration"
    )
    class Meta:
        model = WorkspaceIntegration
        fields = "__all__"

View File

@@ -0,0 +1,45 @@
# Module imports
from plane.app.serializers import BaseSerializer
from plane.db.models import (
GithubIssueSync,
GithubRepository,
GithubRepositorySync,
GithubCommentSync,
)
class GithubRepositorySerializer(BaseSerializer):
    """Serializer for a linked GitHub repository."""

    class Meta:
        model = GithubRepository
        fields = "__all__"
class GithubRepositorySyncSerializer(BaseSerializer):
    """Serializer for a repository<->project sync with nested repo details."""

    # nested repository details (writable source field is "repository")
    repo_detail = GithubRepositorySerializer(source="repository")
    class Meta:
        model = GithubRepositorySync
        fields = "__all__"
class GithubIssueSyncSerializer(BaseSerializer):
    """Serializer for a single issue mapped between GitHub and Plane."""

    class Meta:
        model = GithubIssueSync
        fields = "__all__"
        # relationship targets come from the URL context, not the payload
        read_only_fields = [
            "project",
            "workspace",
            "repository_sync",
        ]
class GithubCommentSyncSerializer(BaseSerializer):
    """Serializer for a comment mapped between GitHub and Plane."""

    class Meta:
        model = GithubCommentSync
        fields = "__all__"
        # relationship targets come from the URL context, not the payload
        read_only_fields = [
            "project",
            "workspace",
            "repository_sync",
            "issue_sync",
        ]

View File

@@ -0,0 +1,14 @@
# Module imports
from plane.app.serializers import BaseSerializer
from plane.db.models import SlackProjectSync
class SlackProjectSyncSerializer(BaseSerializer):
    """Serializer for a Slack channel <-> project sync configuration."""

    class Meta:
        model = SlackProjectSync
        fields = "__all__"
        # relationship targets come from the URL context, not the payload
        read_only_fields = [
            "project",
            "workspace",
            "workspace_integration",
        ]

View File

@@ -33,6 +33,7 @@ from plane.db.models import (
IssueVote,
IssueRelation,
State,
IssueType,
)
@@ -52,6 +53,7 @@ class IssueFlatSerializer(BaseSerializer):
"sequence_id",
"sort_order",
"is_draft",
"type_id",
]
@@ -79,6 +81,12 @@ class IssueCreateSerializer(BaseSerializer):
required=False,
allow_null=True,
)
type_id = serializers.PrimaryKeyRelatedField(
source="type",
queryset=IssueType.objects.all(),
required=False,
allow_null=True,
)
parent_id = serializers.PrimaryKeyRelatedField(
source="parent",
queryset=Issue.objects.all(),
@@ -135,10 +143,20 @@ class IssueCreateSerializer(BaseSerializer):
workspace_id = self.context["workspace_id"]
default_assignee_id = self.context["default_assignee_id"]
issue_type = validated_data.pop("type", None)
if not issue_type:
# Get default issue type
issue_type = IssueType.objects.filter(
project_issue_types__project_id=project_id, is_default=True
).first()
issue_type = issue_type
# Create Issue
issue = Issue.objects.create(
**validated_data,
project_id=project_id,
type=issue_type,
)
# Issue Audit Users
@@ -660,6 +678,7 @@ class IssueInboxSerializer(DynamicBaseSerializer):
"created_at",
"label_ids",
"created_by",
"type_id",
]
read_only_fields = fields
@@ -715,6 +734,7 @@ class IssueSerializer(DynamicBaseSerializer):
"link_count",
"is_draft",
"archived_at",
"type_id",
]
read_only_fields = fields
@@ -726,6 +746,7 @@ class IssueLiteSerializer(DynamicBaseSerializer):
"id",
"sequence_id",
"project_id",
"type_id",
]
read_only_fields = fields

View File

@@ -30,6 +30,7 @@ class PageSerializer(BaseSerializer):
child=serializers.UUIDField(),
required=False,
)
anchor = serializers.CharField(read_only=True)
class Meta:
model = Page
@@ -53,10 +54,12 @@ class PageSerializer(BaseSerializer):
"logo_props",
"label_ids",
"project_ids",
"anchor",
]
read_only_fields = [
"workspace",
"owned_by",
"anchor",
]
def create(self, validated_data):
@@ -125,6 +128,7 @@ class PageSerializer(BaseSerializer):
class PageDetailSerializer(PageSerializer):
description_html = serializers.CharField()
is_favorite = serializers.BooleanField(read_only=True)
class Meta(PageSerializer.Meta):
fields = PageSerializer.Meta.fields + [

View File

@@ -13,7 +13,6 @@ from plane.db.models import (
ProjectMember,
ProjectMemberInvite,
ProjectIdentifier,
DeployBoard,
ProjectPublicMember,
)
@@ -119,6 +118,12 @@ class ProjectListSerializer(DynamicBaseSerializer):
anchor = serializers.CharField(read_only=True)
members = serializers.SerializerMethodField()
cover_image_url = serializers.CharField(read_only=True)
# EE: project_grouping starts
state_id = serializers.UUIDField(read_only=True)
priority = serializers.CharField(read_only=True)
start_date = serializers.DateTimeField(read_only=True)
target_date = serializers.DateTimeField(read_only=True)
# EE: project_grouping ends
def get_members(self, obj):
project_members = getattr(obj, "members_list", None)
@@ -210,22 +215,6 @@ class ProjectMemberLiteSerializer(BaseSerializer):
read_only_fields = fields
class DeployBoardSerializer(BaseSerializer):
project_details = ProjectLiteSerializer(read_only=True, source="project")
workspace_detail = WorkspaceLiteSerializer(
read_only=True, source="workspace"
)
class Meta:
model = DeployBoard
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"anchor",
]
class ProjectPublicMemberSerializer(BaseSerializer):
class Meta:
model = ProjectPublicMember

View File

@@ -9,6 +9,7 @@ from plane.utils.issue_filters import issue_filters
class IssueViewSerializer(DynamicBaseSerializer):
is_favorite = serializers.BooleanField(read_only=True)
anchor = serializers.CharField(read_only=True)
class Meta:
model = IssueView

View File

@@ -3,6 +3,9 @@ import socket
import ipaddress
from urllib.parse import urlparse
# Django imports
from django.conf import settings
# Third party imports
from rest_framework import serializers
@@ -45,6 +48,12 @@ class WebhookSerializer(DynamicBaseSerializer):
{"url": "URL resolves to a blocked IP address."}
)
# if in cloud environment, private IP addresses are also not allowed
if settings.IS_MULTI_TENANT and ip.is_private:
raise serializers.ValidationError(
{"url": "URL resolves to a blocked IP address."}
)
# Additional validation for multiple request domains and their subdomains
request = self.context.get("request")
disallowed_domains = [
@@ -97,6 +106,12 @@ class WebhookSerializer(DynamicBaseSerializer):
{"url": "URL resolves to a blocked IP address."}
)
# if in cloud environment, private IP addresses are also not allowed
if settings.IS_MULTI_TENANT and ip.is_private:
raise serializers.ValidationError(
{"url": "URL resolves to a blocked IP address."}
)
# Additional validation for multiple request domains and their subdomains
request = self.context.get("request")
disallowed_domains = [

View File

@@ -66,6 +66,8 @@ class WorkSpaceMemberSerializer(DynamicBaseSerializer):
class WorkspaceMemberMeSerializer(BaseSerializer):
draft_issue_count = serializers.IntegerField(read_only=True)
active_cycles_count = serializers.IntegerField(read_only=True)
class Meta:
model = WorkspaceMember
fields = "__all__"

View File

@@ -18,6 +18,13 @@ from .views import urlpatterns as view_urls
from .webhook import urlpatterns as webhook_urls
from .workspace import urlpatterns as workspace_urls
# Integrations URLS
from .importer import urlpatterns as importer_urls
from .integration import urlpatterns as integration_urls
# url patterns
from plane.ee.urls.app import urlpatterns as ee_urls
urlpatterns = [
*analytic_urls,
*asset_urls,
@@ -38,4 +45,8 @@ urlpatterns = [
*workspace_urls,
*api_urls,
*webhook_urls,
# ee
*integration_urls,
*importer_urls,
*ee_urls,
]

View File

@@ -9,6 +9,7 @@ from plane.app.views import (
CycleProgressEndpoint,
CycleAnalyticsEndpoint,
TransferCycleIssueEndpoint,
CycleIssueStateAnalyticsEndpoint,
CycleUserPropertiesEndpoint,
CycleArchiveUnarchiveEndpoint,
)
@@ -118,4 +119,9 @@ urlpatterns = [
CycleAnalyticsEndpoint.as_view(),
name="project-cycle",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-progress/",
CycleIssueStateAnalyticsEndpoint.as_view(),
name="project-cycle-progress",
),
]

View File

@@ -0,0 +1,43 @@
from django.urls import path
from plane.app.views import (
ServiceIssueImportSummaryEndpoint,
ImportServiceEndpoint,
UpdateServiceImportStatusEndpoint,
BulkImportIssuesEndpoint,
)
# URL routes for the external-service importer (summary, lifecycle, status,
# and bulk issue import). All routes are workspace-scoped.
urlpatterns = [
    # Summary of importable issues for a given external service
    path(
        "workspaces/<str:slug>/importers/<str:service>/",
        ServiceIssueImportSummaryEndpoint.as_view(),
        name="importer-summary",
    ),
    # Start an import for a project
    path(
        "workspaces/<str:slug>/projects/importers/<str:service>/",
        ImportServiceEndpoint.as_view(),
        name="importer",
    ),
    # List all imports in the workspace
    path(
        "workspaces/<str:slug>/importers/",
        ImportServiceEndpoint.as_view(),
        name="importer",
    ),
    # Retrieve/delete a single import record
    path(
        "workspaces/<str:slug>/importers/<str:service>/<uuid:pk>/",
        ImportServiceEndpoint.as_view(),
        name="importer",
    ),
    # Update the progress/status of a running import
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/service/<str:service>/importers/<uuid:importer_id>/",
        UpdateServiceImportStatusEndpoint.as_view(),
        name="importer-status",
    ),
    # Bulk-create issues produced by an import
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-issues/<str:service>/",
        BulkImportIssuesEndpoint.as_view(),
        name="bulk-import-issues",
    ),
]

View File

@@ -0,0 +1,150 @@
from django.urls import path
from plane.app.views import (
IntegrationViewSet,
WorkspaceIntegrationViewSet,
GithubRepositoriesEndpoint,
GithubRepositorySyncViewSet,
GithubIssueSyncViewSet,
GithubCommentSyncViewSet,
BulkCreateGithubIssueSyncEndpoint,
SlackProjectSyncViewSet,
)
# URL routes for integrations: generic integration CRUD, workspace-level
# installs, and the GitHub and Slack provider-specific sync endpoints.
urlpatterns = [
    # List/register available integrations (instance level)
    path(
        "integrations/",
        IntegrationViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="integrations",
    ),
    path(
        "integrations/<uuid:pk>/",
        IntegrationViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="integrations",
    ),
    # Integrations installed on a workspace
    path(
        "workspaces/<str:slug>/workspace-integrations/",
        WorkspaceIntegrationViewSet.as_view(
            {
                "get": "list",
            }
        ),
        name="workspace-integrations",
    ),
    path(
        "workspaces/<str:slug>/workspace-integrations/<str:provider>/",
        WorkspaceIntegrationViewSet.as_view(
            {
                "post": "create",
            }
        ),
        name="workspace-integrations",
    ),
    path(
        "workspaces/<str:slug>/workspace-integrations/<uuid:pk>/provider/",
        WorkspaceIntegrationViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
        name="workspace-integrations",
    ),
    # Github Integrations
    path(
        "workspaces/<str:slug>/workspace-integrations/<uuid:workspace_integration_id>/github-repositories/",
        GithubRepositoriesEndpoint.as_view(),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/",
        GithubRepositorySyncViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/<uuid:pk>/",
        GithubRepositorySyncViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/",
        GithubIssueSyncViewSet.as_view(
            {
                "post": "create",
                "get": "list",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/bulk-create-github-issue-sync/",
        BulkCreateGithubIssueSyncEndpoint.as_view(),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:pk>/",
        GithubIssueSyncViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/",
        GithubCommentSyncViewSet.as_view(
            {
                "post": "create",
                "get": "list",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/<uuid:pk>/",
        GithubCommentSyncViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
    ),
    ## End Github Integrations
    # Slack Integration
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/",
        SlackProjectSyncViewSet.as_view(
            {
                "post": "create",
                "get": "list",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/<uuid:pk>/",
        SlackProjectSyncViewSet.as_view(
            {
                "delete": "destroy",
                "get": "retrieve",
            }
        ),
    ),
    ## End Slack Integration
]

View File

@@ -18,7 +18,6 @@ from plane.app.views import (
IssueUserDisplayPropertyEndpoint,
IssueViewSet,
LabelViewSet,
BulkArchiveIssuesEndpoint,
DeletedIssuesListViewSet,
IssuePaginatedViewSet,
IssueAttachmentV2Endpoint,
@@ -90,11 +89,6 @@ urlpatterns = [
BulkDeleteIssuesEndpoint.as_view(),
name="project-issues-bulk",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-archive-issues/",
BulkArchiveIssuesEndpoint.as_view(),
name="bulk-archive-issues",
),
##
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/sub-issues/",

View File

@@ -44,6 +44,17 @@ urlpatterns = [
),
name="user-favorite-pages",
),
# Lock
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/lock/",
PageViewSet.as_view(
{
"post": "lock",
"delete": "unlock",
}
),
name="project-page-lock-unlock",
),
# archived pages
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/archive/",

View File

@@ -4,6 +4,9 @@ from django.urls import path
from plane.app.views import (
GlobalSearchEndpoint,
IssueSearchEndpoint,
SearchEndpoint,
WorkspaceSearchEndpoint,
WorkspaceEntitySearchEndpoint,
)
@@ -18,4 +21,19 @@ urlpatterns = [
IssueSearchEndpoint.as_view(),
name="project-issue-search",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/search/",
SearchEndpoint.as_view(),
name="search",
),
path(
"workspaces/<str:slug>/app-search/",
WorkspaceSearchEndpoint.as_view(),
name="app-search",
),
path(
"workspaces/<str:slug>/entity-search/",
WorkspaceEntitySearchEndpoint.as_view(),
name="entity-search",
),
]

View File

@@ -102,10 +102,9 @@ from .cycle.base import (
TransferCycleIssueEndpoint,
CycleAnalyticsEndpoint,
CycleProgressEndpoint,
CycleIssueStateAnalyticsEndpoint,
)
from .cycle.issue import (
CycleIssueViewSet,
)
from .cycle.issue import CycleIssueViewSet
from .cycle.archive import (
CycleArchiveUnarchiveEndpoint,
)
@@ -136,7 +135,7 @@ from .issue.activity import (
IssueActivityEndpoint,
)
from .issue.archive import IssueArchiveViewSet, BulkArchiveIssuesEndpoint
from .issue.archive import IssueArchiveViewSet
from .issue.attachment import (
IssueAttachmentEndpoint,
@@ -203,9 +202,17 @@ from .page.base import (
)
from .page.version import PageVersionEndpoint
from .search.base import (
GlobalSearchEndpoint,
SearchEndpoint,
)
from .search.base import GlobalSearchEndpoint
from .search.issue import IssueSearchEndpoint
from .search.workspace import (
WorkspaceSearchEndpoint,
WorkspaceEntitySearchEndpoint,
)
from .external.base import (
GPTIntegrationEndpoint,
@@ -247,6 +254,28 @@ from .dashboard.base import DashboardEndpoint, WidgetsEndpoint
from .error_404 import custom_404_view
from .importer.base import (
ServiceIssueImportSummaryEndpoint,
ImportServiceEndpoint,
UpdateServiceImportStatusEndpoint,
BulkImportIssuesEndpoint,
BulkImportModulesEndpoint,
)
from .integration.base import (
IntegrationViewSet,
WorkspaceIntegrationViewSet,
)
from .integration.github import (
GithubRepositoriesEndpoint,
GithubRepositorySyncViewSet,
GithubIssueSyncViewSet,
GithubCommentSyncViewSet,
BulkCreateGithubIssueSyncEndpoint,
)
from .integration.slack import SlackProjectSyncViewSet
from .exporter.base import ExportIssuesEndpoint
from .notification.base import MarkAllReadNotificationViewSet
from .user.base import AccountEndpoint, ProfileEndpoint, UserSessionEndpoint

View File

@@ -92,6 +92,7 @@ class ServiceApiTokenEndpoint(BaseAPIView):
api_token = APIToken.objects.filter(
workspace=workspace,
is_service=True,
user=request.user,
).first()
if api_token:
@@ -119,4 +120,3 @@ class ServiceApiTokenEndpoint(BaseAPIView):
},
status=status.HTTP_201_CREATED,
)

View File

@@ -2,11 +2,13 @@
from rest_framework import status
from rest_framework.response import Response
from rest_framework.parsers import MultiPartParser, FormParser, JSONParser
from rest_framework_simplejwt.authentication import JWTAuthentication
# Module imports
from ..base import BaseAPIView, BaseViewSet
from plane.db.models import FileAsset, Workspace
from plane.app.serializers import FileAssetSerializer
from plane.authentication.session import BaseSessionAuthentication
class FileAssetEndpoint(BaseAPIView):
@@ -20,6 +22,8 @@ class FileAssetEndpoint(BaseAPIView):
A viewset for viewing and editing task instances.
"""
authentication_classes = [JWTAuthentication, BaseSessionAuthentication]
def get(self, request, workspace_id, asset_key):
asset_key = str(workspace_id) + "/" + asset_key
files = FileAsset.objects.filter(asset=asset_key)
@@ -55,6 +59,9 @@ class FileAssetEndpoint(BaseAPIView):
class FileAssetViewSet(BaseViewSet):
authentication_classes = [JWTAuthentication, BaseSessionAuthentication]
def restore(self, request, workspace_id, asset_key):
asset_key = str(workspace_id) + "/" + asset_key
file_asset = FileAsset.objects.get(asset=asset_key)

View File

@@ -10,6 +10,7 @@ from django.utils import timezone
from rest_framework import status
from rest_framework.response import Response
from rest_framework.permissions import AllowAny
from rest_framework_simplejwt.authentication import JWTAuthentication
# Module imports
from ..base import BaseAPIView
@@ -23,7 +24,9 @@ from plane.settings.storage import S3Storage
from plane.app.permissions import allow_permission, ROLE
from plane.utils.cache import invalidate_cache_directly
from plane.bgtasks.storage_metadata_task import get_asset_object_metadata
from plane.payment.flags.flag_decorator import check_workspace_feature_flag
from plane.payment.flags.flag import FeatureFlag
from plane.authentication.session import BaseSessionAuthentication
class UserAssetsV2Endpoint(BaseAPIView):
"""This endpoint is used to upload user profile images."""
@@ -397,8 +400,20 @@ class WorkspaceFileAssetEndpoint(BaseAPIView):
status=status.HTTP_400_BAD_REQUEST,
)
# Get the size limit
size_limit = min(settings.FILE_SIZE_LIMIT, size)
if entity_type in [
FileAsset.EntityTypeContext.WORKSPACE_LOGO,
FileAsset.EntityTypeContext.PROJECT_COVER,
]:
size_limit = min(size, settings.FILE_SIZE_LIMIT)
else:
if settings.IS_MULTI_TENANT and check_workspace_feature_flag(
feature_key=FeatureFlag.FILE_SIZE_LIMIT_PRO,
slug=slug,
user_id=str(request.user.id),
):
size_limit = min(size, settings.PRO_FILE_SIZE_LIMIT)
else:
size_limit = min(size, settings.FILE_SIZE_LIMIT)
# Get the workspace
workspace = Workspace.objects.get(slug=slug)
@@ -544,6 +559,8 @@ class StaticFileAssetEndpoint(BaseAPIView):
class AssetRestoreEndpoint(BaseAPIView):
"""Endpoint to restore a deleted assets."""
authentication_classes = [JWTAuthentication, BaseSessionAuthentication]
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST], level="WORKSPACE")
def post(self, request, slug, asset_id):
asset = FileAsset.all_objects.get(id=asset_id, workspace__slug=slug)
@@ -556,6 +573,8 @@ class AssetRestoreEndpoint(BaseAPIView):
class ProjectAssetEndpoint(BaseAPIView):
"""This endpoint is used to upload cover images/logos etc for workspace, projects and users."""
authentication_classes = [JWTAuthentication, BaseSessionAuthentication]
def get_entity_id_field(self, entity_type, entity_id):
if entity_type == FileAsset.EntityTypeContext.WORKSPACE_LOGO:
return {
@@ -630,8 +649,20 @@ class ProjectAssetEndpoint(BaseAPIView):
status=status.HTTP_400_BAD_REQUEST,
)
# Get the size limit
size_limit = min(settings.FILE_SIZE_LIMIT, size)
if entity_type in [
FileAsset.EntityTypeContext.WORKSPACE_LOGO,
FileAsset.EntityTypeContext.PROJECT_COVER,
]:
size_limit = min(size, settings.FILE_SIZE_LIMIT)
else:
if check_workspace_feature_flag(
feature_key=FeatureFlag.FILE_SIZE_LIMIT_PRO,
slug=slug,
user_id=str(request.user.id),
):
size_limit = min(size, settings.PRO_FILE_SIZE_LIMIT)
else:
size_limit = min(size, settings.FILE_SIZE_LIMIT)
# Get the workspace
workspace = Workspace.objects.get(slug=slug)
@@ -737,7 +768,6 @@ class ProjectAssetEndpoint(BaseAPIView):
class ProjectBulkAssetEndpoint(BaseAPIView):
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
def post(self, request, slug, project_id, entity_id):
asset_ids = request.data.get("asset_ids", [])

View File

@@ -32,6 +32,7 @@ from plane.app.permissions import allow_permission, ROLE
from plane.app.serializers import (
CycleSerializer,
CycleUserPropertiesSerializer,
EntityProgressSerializer,
CycleWriteSerializer,
)
from plane.bgtasks.issue_activities_task import issue_activity
@@ -45,9 +46,16 @@ from plane.db.models import (
User,
Project,
ProjectMember,
Workspace,
)
from plane.ee.models import EntityIssueStateActivity, EntityProgress
from plane.utils.analytics_plot import burndown_plot
from plane.bgtasks.recent_visited_task import recent_visited_task
from plane.bgtasks.entity_issue_state_progress_task import (
track_entity_issue_state_progress,
)
from plane.payment.flags.flag import FeatureFlag
from plane.payment.flags.flag_decorator import check_feature_flag
# Module imports
from .. import BaseAPIView, BaseViewSet
@@ -1001,10 +1009,69 @@ class TransferCycleIssueEndpoint(BaseAPIView):
}
)
cycle_issues = CycleIssue.objects.bulk_update(
_ = CycleIssue.objects.bulk_update(
updated_cycles, ["cycle_id"], batch_size=100
)
estimate_type = Project.objects.filter(
workspace__slug=slug,
pk=project_id,
estimate__isnull=False,
estimate__type="points",
).exists()
if old_cycle.first().version == 2:
EntityIssueStateActivity.objects.bulk_create(
[
EntityIssueStateActivity(
cycle_id=cycle_id,
state_id=cycle_issue.issue.state_id,
issue_id=cycle_issue.issue_id,
state_group=cycle_issue.issue.state.group,
action="REMOVED",
entity_type="CYCLE",
estimate_point_id=cycle_issue.issue.estimate_point_id,
estimate_value=(
cycle_issue.issue.estimate_point.value
if estimate_type
and cycle_issue.issue.estimate_point
else None
),
workspace_id=cycle_issue.workspace_id,
created_by_id=request.user.id,
updated_by_id=request.user.id,
)
for cycle_issue in cycle_issues
],
batch_size=10,
)
if new_cycle.version == 2:
EntityIssueStateActivity.objects.bulk_create(
[
EntityIssueStateActivity(
cycle_id=new_cycle_id,
state_id=cycle_issue.issue.state_id,
issue_id=cycle_issue.issue_id,
state_group=cycle_issue.issue.state.group,
action="ADDED",
entity_type="CYCLE",
estimate_point_id=cycle_issue.issue.estimate_point_id,
estimate_value=(
cycle_issue.issue.estimate_point.value
if estimate_type
and cycle_issue.issue.estimate_point
else None
),
workspace_id=cycle_issue.workspace_id,
created_by_id=request.user.id,
updated_by_id=request.user.id,
)
for cycle_issue in cycle_issues
],
batch_size=10,
)
# Capture Issue Activity
issue_activity.delay(
type="cycle.activity.created",
@@ -1454,3 +1521,31 @@ class CycleAnalyticsEndpoint(BaseAPIView):
},
status=status.HTTP_200_OK,
)
class CycleIssueStateAnalyticsEndpoint(BaseAPIView):
@check_feature_flag(FeatureFlag.ACTIVE_CYCLE_PRO)
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
def get(self, request, slug, project_id, cycle_id):
workspace = Workspace.objects.get(slug=slug)
cycle_state_progress = EntityProgress.objects.filter(
cycle_id=cycle_id,
entity_type="CYCLE",
workspace__slug=slug,
).order_by("progress_date")
# Generate today's data
today_data = track_entity_issue_state_progress(
current_date=timezone.now(),
cycles=[(cycle_id, workspace.id)],
save=False,
)
# Combine existing data with today's data
cycle_state_progress = list(cycle_state_progress) + today_data
return Response(
EntityProgressSerializer(cycle_state_progress, many=True).data,
status=status.HTTP_200_OK,
)

View File

@@ -17,19 +17,22 @@ from .. import BaseViewSet
from plane.app.serializers import (
CycleIssueSerializer,
)
from plane.bgtasks.issue_activities_task import issue_activity
from plane.db.models import (
Cycle,
CycleIssue,
Issue,
FileAsset,
CycleIssue,
IssueLink,
Project,
Workspace,
)
from plane.ee.models import EntityIssueStateActivity
from plane.utils.grouper import (
issue_group_values,
issue_on_results,
issue_queryset_grouper,
)
from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import (
@@ -274,6 +277,12 @@ class CycleIssueViewSet(BaseViewSet):
]
new_issues = list(set(issues) - set(existing_issues))
# Fetch issue details
issue_objects = Issue.objects.filter(id__in=issues).annotate(
cycle_id=F("issue_cycle__cycle_id")
)
issue_dict = {str(issue.id): issue for issue in issue_objects}
# New issues to create
created_records = CycleIssue.objects.bulk_create(
[
@@ -290,6 +299,63 @@ class CycleIssueViewSet(BaseViewSet):
batch_size=10,
)
estimate_type = Project.objects.filter(
workspace__slug=slug,
pk=project_id,
estimate__isnull=False,
estimate__type="points",
).exists()
if cycle.version == 2:
EntityIssueStateActivity.objects.bulk_create(
[
EntityIssueStateActivity(
cycle_id=cycle_id,
state_id=str(issue_dict[issue_id].state_id),
issue_id=issue_id,
state_group=issue_dict[issue_id].state.group,
action="ADDED",
entity_type="CYCLE",
estimate_point_id=issue_dict[issue_id].estimate_point_id,
estimate_value=(
issue_dict[issue_id].estimate_point.value
if estimate_type
and issue_dict[issue_id].estimate_point
else None
),
workspace_id=cycle.workspace_id,
created_by_id=request.user.id,
updated_by_id=request.user.id,
)
for issue_id in issues
],
batch_size=10,
)
EntityIssueStateActivity.objects.bulk_create(
[
EntityIssueStateActivity(
cycle_id=issue_dict[issue_id].cycle_id,
state_id=str(issue_dict[issue_id].state_id),
issue_id=issue_id,
state_group=issue_dict[issue_id].state.group,
action="REMOVED",
entity_type="CYCLE",
estimate_point_id=issue_dict[issue_id].estimate_point_id,
estimate_value=(
issue_dict[issue_id].estimate_point.value
if estimate_type
and issue_dict[issue_id].estimate_point
else None
),
workspace_id=cycle.workspace_id,
created_by_id=request.user.id,
updated_by_id=request.user.id,
)
for issue_id in existing_issues
]
)
# Updated Issues
updated_records = []
update_cycle_issue_activity = []
@@ -336,12 +402,42 @@ class CycleIssueViewSet(BaseViewSet):
@allow_permission([ROLE.ADMIN, ROLE.MEMBER])
def destroy(self, request, slug, project_id, cycle_id, issue_id):
workspace = Workspace.objects.get(slug=slug)
cycle_issue = CycleIssue.objects.filter(
issue_id=issue_id,
workspace__slug=slug,
project_id=project_id,
cycle_id=cycle_id,
)
issue = Issue.objects.get(pk=issue_id)
estimate_type = Project.objects.filter(
workspace__slug=slug,
pk=project_id,
estimate__isnull=False,
estimate__type="points",
).exists()
cycle = Cycle.objects.get(pk=cycle_id)
if cycle.version == 2:
EntityIssueStateActivity.objects.create(
cycle_id=cycle_id,
state_id=issue.state_id,
issue_id=issue_id,
state_group=issue.state.group,
action="REMOVED",
entity_type="CYCLE",
estimate_point_id=issue.estimate_point_id,
estimate_value=(
issue.estimate_point.value
if estimate_type and issue.estimate_point
else None
),
created_by_id=request.user.id,
updated_by_id=request.user.id,
workspace_id=workspace.id,
)
issue_activity.delay(
type="cycle.activity.deleted",
requested_data=json.dumps(

View File

@@ -1,14 +1,17 @@
# Python imports
import random
import string
import json
# Django imports
from django.utils import timezone
from django.db.models import F
# Third party imports
from rest_framework.response import Response
from rest_framework import status
# Module imports
from ..base import BaseViewSet, BaseAPIView
from plane.app.permissions import (
@@ -16,7 +19,8 @@ from plane.app.permissions import (
allow_permission,
ROLE,
)
from plane.db.models import Project, Estimate, EstimatePoint, Issue
from plane.db.models import Project, Estimate, EstimatePoint, Issue, Cycle
from plane.ee.models import EntityIssueStateActivity
from plane.app.serializers import (
EstimateSerializer,
EstimatePointSerializer,
@@ -240,6 +244,44 @@ class EstimatePointEndpoint(BaseViewSet):
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
serializer.save()
# also track the entity state change
if Project.objects.filter(
workspace__slug=slug,
pk=project_id,
estimate__isnull=False,
estimate__type="points",
).exists():
cycle = Cycle.objects.filter(
start_date__lte=timezone.now(),
end_date__gte=timezone.now(),
project_id=project_id,
workspace__slug=slug,
).first()
issues = Issue.objects.annotate(
cycle_id=F("issue_cycle__cycle_id")
).filter(estimate_point_id=estimate_point_id, cycle_id=cycle.id)
if cycle.version == 2:
EntityIssueStateActivity.objects.bulk_create(
[
EntityIssueStateActivity(
cycle_id=cycle.id,
state_id=str(issue.state_id),
issue_id=issue.id,
state_group=issue.state.group,
action="UPDATED",
entity_type="CYCLE",
estimate_point_id=estimate_point_id,
estimate_value=(request.data.get("value")),
workspace_id=issue.workspace_id,
created_by_id=request.user.id,
updated_by_id=request.user.id,
)
for issue in issues
],
batch_size=10,
)
return Response(serializer.data, status=status.HTTP_200_OK)
@allow_permission([ROLE.ADMIN, ROLE.MEMBER])
@@ -314,6 +356,7 @@ class EstimatePointEndpoint(BaseViewSet):
),
epoch=int(timezone.now().timestamp()),
)
issues.update(estimate_point_id=None)
# delete the estimate point
old_estimate_point = EstimatePoint.objects.filter(
@@ -332,9 +375,61 @@ class EstimatePointEndpoint(BaseViewSet):
["key"],
batch_size=10,
)
# also track the entity state change
if Project.objects.filter(
workspace__slug=slug,
pk=project_id,
estimate__isnull=False,
estimate__type="points",
).exists():
cycle = Cycle.objects.filter(
start_date__lte=timezone.now(),
end_date__gte=timezone.now(),
project_id=project_id,
workspace__slug=slug,
).first()
if cycle and cycle.version == 2:
new_estimate_value = (
EstimatePoint.objects.filter(pk=new_estimate_id)
.values_list("value", flat=True)
.first()
if new_estimate_id
else None
)
issues = Issue.objects.filter(
estimate_point_id=(
new_estimate_id
if new_estimate_id
else estimate_point_id
),
issue_cycle__cycle_id=cycle.id,
)
EntityIssueStateActivity.objects.bulk_create(
[
EntityIssueStateActivity(
cycle_id=cycle.id,
state_id=str(issue.state_id),
issue_id=issue.id,
state_group=issue.state.group,
action="UPDATED",
entity_type="CYCLE",
estimate_point_id=new_estimate_id,
estimate_value=new_estimate_value,
workspace_id=issue.workspace_id,
created_by_id=request.user.id,
updated_by_id=request.user.id,
)
for issue in issues
],
batch_size=10,
)
old_estimate_point.delete()
# TODO: track the issue activity as well if the estimate point is deleted
return Response(
EstimatePointSerializer(updated_estimate_points, many=True).data,
status=status.HTTP_200_OK,

View File

@@ -21,7 +21,6 @@ from plane.license.utils.instance_value import get_configuration_value
class GPTIntegrationEndpoint(BaseAPIView):
@allow_permission([ROLE.ADMIN, ROLE.MEMBER])
def post(self, request, slug, project_id):
OPENAI_API_KEY, GPT_ENGINE = get_configuration_value(
@@ -32,7 +31,7 @@ class GPTIntegrationEndpoint(BaseAPIView):
},
{
"key": "GPT_ENGINE",
"default": os.environ.get("GPT_ENGINE", "gpt-3.5-turbo"),
"default": os.environ.get("GPT_ENGINE", "gpt-4o-mini"),
},
]
)
@@ -82,7 +81,6 @@ class GPTIntegrationEndpoint(BaseAPIView):
class WorkspaceGPTIntegrationEndpoint(BaseAPIView):
@allow_permission(
allowed_roles=[ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE"
)
@@ -95,7 +93,7 @@ class WorkspaceGPTIntegrationEndpoint(BaseAPIView):
},
{
"key": "GPT_ENGINE",
"default": os.environ.get("GPT_ENGINE", "gpt-3.5-turbo"),
"default": os.environ.get("GPT_ENGINE", "gpt-4o-mini"),
},
]
)

View File

@@ -0,0 +1,560 @@
# Python imports
import uuid
# Third party imports
from rest_framework import status
from rest_framework.response import Response
# Django imports
from django.db.models import Max, Q
# Module imports
from plane.app.views import BaseAPIView
from plane.db.models import (
WorkspaceIntegration,
Importer,
APIToken,
Project,
State,
IssueSequence,
Issue,
IssueActivity,
IssueComment,
IssueLink,
IssueLabel,
Workspace,
IssueAssignee,
Module,
ModuleLink,
ModuleIssue,
Label,
)
from plane.app.serializers import (
ImporterSerializer,
IssueFlatSerializer,
ModuleSerializer,
)
from plane.utils.integrations.github import get_github_repo_details
from plane.utils.importers.jira import (
jira_project_issue_summary,
is_allowed_hostname,
)
from plane.bgtasks.importer_task import service_importer
from plane.utils.html_processor import strip_tags
from plane.app.permissions import WorkSpaceAdminPermission
class ServiceIssueImportSummaryEndpoint(BaseAPIView):
    """Return a pre-import summary (issue count, labels, members) for a service."""

    def _github_summary(self, request, slug):
        # Both query params are mandatory for a repository lookup.
        owner = request.GET.get("owner", False)
        repo = request.GET.get("repo", False)
        if not owner or not repo:
            return Response(
                {"error": "Owner and repo are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        workspace_integration = WorkspaceIntegration.objects.get(
            integration__provider="github", workspace__slug=slug
        )
        # The access-tokens URL is stored during app installation; its
        # absence means the GitHub app install did not complete.
        access_tokens_url = workspace_integration.metadata.get(
            "access_tokens_url", False
        )
        if not access_tokens_url:
            return Response(
                {
                    "error": "There was an error during the installation of the GitHub app. To resolve this issue, we recommend reinstalling the GitHub app."
                },
                status=status.HTTP_400_BAD_REQUEST,
            )
        issue_count, labels, collaborators = get_github_repo_details(
            access_tokens_url, owner, repo
        )
        return Response(
            {
                "issue_count": issue_count,
                "labels": labels,
                "collaborators": collaborators,
            },
            status=status.HTTP_200_OK,
        )

    def _jira_summary(self, request):
        # Every one of these query params is required; report the first missing.
        required = {
            "project_key": "Project key is required",
            "api_token": "API token is required",
            "email": "Email is required",
            "cloud_hostname": "Cloud hostname is required",
        }
        for param, message in required.items():
            if not request.GET.get(param, False):
                return Response(
                    {"error": message},
                    status=status.HTTP_400_BAD_REQUEST,
                )
        summary = jira_project_issue_summary(
            request.GET.get("email", ""),
            request.GET.get("api_token", ""),
            request.GET.get("project_key", ""),
            request.GET.get("cloud_hostname", ""),
        )
        # The helper reports failures via an "error" key in its payload.
        failed = "error" in summary
        return Response(
            summary,
            status=(
                status.HTTP_400_BAD_REQUEST if failed else status.HTTP_200_OK
            ),
        )

    def get(self, request, slug, service):
        # Dispatch to the per-service summary builder.
        if service == "github":
            return self._github_summary(request, slug)
        if service == "jira":
            return self._jira_summary(request)
        return Response(
            {"error": "Service not supported yet"},
            status=status.HTTP_400_BAD_REQUEST,
        )
class ImportServiceEndpoint(BaseAPIView):
    """Create, list, update and delete importer runs for a workspace.

    POST queues a new import job for ``service`` (``github`` or ``jira``),
    GET lists past imports, PATCH updates an import record and DELETE
    removes an import together with the issues/labels it created.
    """

    permission_classes = [
        WorkSpaceAdminPermission,
    ]

    def _get_or_create_token(self, workspace, user):
        # Re-use the caller's existing workspace API token; the background
        # importer authenticates with it when writing entities back.
        api_token = APIToken.objects.filter(
            user=user, workspace=workspace
        ).first()
        if api_token is None:
            api_token = APIToken.objects.create(
                user=user,
                label="Importer",
                workspace=workspace,
            )
        return api_token

    def post(self, request, slug, service):
        project_id = request.data.get("project_id", False)
        if not project_id:
            return Response(
                {"error": "Project ID is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        workspace = Workspace.objects.get(slug=slug)
        if service == "github":
            data = request.data.get("data", False)
            metadata = request.data.get("metadata", False)
            config = request.data.get("config", False)
            if not data or not metadata or not config:
                return Response(
                    {"error": "Data, config and metadata are required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            api_token = self._get_or_create_token(workspace, request.user)
            importer = Importer.objects.create(
                service=service,
                project_id=project_id,
                status="queued",
                initiated_by=request.user,
                data=data,
                metadata=metadata,
                token=api_token,
                config=config,
                created_by=request.user,
                updated_by=request.user,
            )
            service_importer.delay(service, importer.id)
            serializer = ImporterSerializer(importer)
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        if service == "jira":
            data = request.data.get("data", False)
            metadata = request.data.get("metadata", False)
            config = request.data.get("config", False)
            # Bug fix: validate the payloads BEFORE dereferencing
            # ``metadata`` — it defaults to False, which has no ``.get``
            # and previously raised AttributeError (HTTP 500) when absent.
            if not data or not metadata:
                return Response(
                    {"error": "Data, config and metadata are required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            cloud_hostname = metadata.get("cloud_hostname", False)
            if not cloud_hostname:
                return Response(
                    {"error": "Cloud hostname is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            # Reject hostnames outside the allow-list (SSRF guard).
            if not is_allowed_hostname(cloud_hostname):
                return Response(
                    {"error": "Hostname is not a valid hostname."},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            api_token = self._get_or_create_token(workspace, request.user)
            importer = Importer.objects.create(
                service=service,
                project_id=project_id,
                status="queued",
                initiated_by=request.user,
                data=data,
                metadata=metadata,
                token=api_token,
                config=config,
                created_by=request.user,
                updated_by=request.user,
            )
            service_importer.delay(service, importer.id)
            serializer = ImporterSerializer(importer)
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(
            # Typo fixed: was "Servivce not supported yet".
            {"error": "Service not supported yet"},
            status=status.HTTP_400_BAD_REQUEST,
        )

    def get(self, request, slug):
        imports = (
            Importer.objects.filter(workspace__slug=slug)
            .order_by("-created_at")
            .select_related("initiated_by", "project", "workspace")
        )
        serializer = ImporterSerializer(imports, many=True)
        return Response(serializer.data)

    def delete(self, request, slug, service, pk):
        importer = Importer.objects.get(
            pk=pk, service=service, workspace__slug=slug
        )
        if importer.imported_data is not None:
            # Cascade-delete everything this import run created.
            imported_issues = importer.imported_data.get("issues", [])
            Issue.issue_objects.filter(id__in=imported_issues).delete()
            imported_labels = importer.imported_data.get("labels", [])
            Label.objects.filter(id__in=imported_labels).delete()
            if importer.service == "jira":
                imported_modules = importer.imported_data.get("modules", [])
                Module.objects.filter(id__in=imported_modules).delete()
        importer.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)

    def patch(self, request, slug, service, pk):
        importer = Importer.objects.get(
            pk=pk, service=service, workspace__slug=slug
        )
        serializer = ImporterSerializer(
            importer, data=request.data, partial=True
        )
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class UpdateServiceImportStatusEndpoint(BaseAPIView):
    """Update the status of a running import (called back by the worker)."""

    def post(self, request, slug, project_id, service, importer_id):
        importer = Importer.objects.get(
            pk=importer_id,
            workspace__slug=slug,
            project_id=project_id,
            service=service,
        )
        # Default to "processing" when the worker sends no explicit status.
        importer.status = request.data.get("status", "processing")
        importer.save()
        # Bug fix: the original called ``Response(status.HTTP_200_OK)``,
        # which passed the status code positionally as the response *body*.
        return Response(status=status.HTTP_200_OK)
class BulkImportIssuesEndpoint(BaseAPIView):
    """Bulk-create issues (with labels, assignees, comments, links and
    activity rows) from an external importer payload.

    Expects ``request.data["issues_data"]`` to be a list of dicts; each dict
    may carry ``state``, ``name``, ``description_html``, ``start_date``,
    ``target_date``, ``priority``, ``labels_list``, ``assignees_list``,
    ``comments_list`` and ``link``. Returns the created issues serialized
    with ``IssueFlatSerializer`` and HTTP 201.
    """

    def post(self, request, slug, project_id, service):
        # Get the project
        project = Project.objects.get(pk=project_id, workspace__slug=slug)
        # Get the default state
        default_state = State.objects.filter(
            ~Q(name="Triage"), project_id=project_id, default=True
        ).first()
        # if there is no default state assign any random state
        if default_state is None:
            default_state = State.objects.filter(
                ~Q(name="Triage"), project_id=project_id
            ).first()
        # Get the maximum sequence_id
        last_id = IssueSequence.objects.filter(
            project_id=project_id
        ).aggregate(largest=Max("sequence"))["largest"]
        # Continue numbering after the highest existing sequence (start at 1).
        last_id = 1 if last_id is None else last_id + 1
        # Get the maximum sort order
        largest_sort_order = Issue.objects.filter(
            project_id=project_id, state=default_state
        ).aggregate(largest=Max("sort_order"))["largest"]
        # 65535 seeds an empty board; each new issue is spaced 10000 apart.
        largest_sort_order = (
            65535 if largest_sort_order is None else largest_sort_order + 10000
        )
        # Get the issues_data
        issues_data = request.data.get("issues_data", [])
        if not len(issues_data):
            return Response(
                {"error": "Issue data is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        # Issues
        bulk_issues = []
        for issue_data in issues_data:
            bulk_issues.append(
                Issue(
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    # Use the payload's state when given, else the default.
                    state_id=(
                        issue_data.get("state")
                        if issue_data.get("state", False)
                        else default_state.id
                    ),
                    name=issue_data.get("name", "Issue Created through Bulk"),
                    description_html=issue_data.get(
                        "description_html", "<p></p>"
                    ),
                    # Plain-text mirror of the HTML description (None if empty).
                    description_stripped=(
                        None
                        if (
                            issue_data.get("description_html") == ""
                            or issue_data.get("description_html") is None
                        )
                        else strip_tags(issue_data.get("description_html"))
                    ),
                    sequence_id=last_id,
                    sort_order=largest_sort_order,
                    start_date=issue_data.get("start_date", None),
                    target_date=issue_data.get("target_date", None),
                    priority=issue_data.get("priority", "none"),
                    created_by=request.user,
                )
            )
            largest_sort_order = largest_sort_order + 10000
            last_id = last_id + 1
        # NOTE(review): later zips pair ``issues`` with ``issues_data`` by
        # position and read ``issue.id`` — assumes bulk_create returns the
        # rows in order with PKs set despite ignore_conflicts; verify on the
        # deployed database backend.
        issues = Issue.objects.bulk_create(
            bulk_issues,
            batch_size=100,
            ignore_conflicts=True,
        )
        # Sequences
        _ = IssueSequence.objects.bulk_create(
            [
                IssueSequence(
                    issue=issue,
                    sequence=issue.sequence_id,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                )
                for issue in issues
            ],
            batch_size=100,
        )
        # Attach Labels
        bulk_issue_labels = []
        for issue, issue_data in zip(issues, issues_data):
            labels_list = issue_data.get("labels_list", [])
            bulk_issue_labels = bulk_issue_labels + [
                IssueLabel(
                    issue=issue,
                    label_id=label_id,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for label_id in labels_list
            ]
        _ = IssueLabel.objects.bulk_create(
            bulk_issue_labels, batch_size=100, ignore_conflicts=True
        )
        # Attach Assignees
        bulk_issue_assignees = []
        for issue, issue_data in zip(issues, issues_data):
            assignees_list = issue_data.get("assignees_list", [])
            bulk_issue_assignees = bulk_issue_assignees + [
                IssueAssignee(
                    issue=issue,
                    assignee_id=assignee_id,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for assignee_id in assignees_list
            ]
        _ = IssueAssignee.objects.bulk_create(
            bulk_issue_assignees, batch_size=100, ignore_conflicts=True
        )
        # Track the issue activities
        IssueActivity.objects.bulk_create(
            [
                IssueActivity(
                    issue=issue,
                    actor=request.user,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    comment=f"imported the issue from {service}",
                    verb="created",
                    created_by=request.user,
                )
                for issue in issues
            ],
            batch_size=100,
        )
        # Create Comments
        bulk_issue_comments = []
        for issue, issue_data in zip(issues, issues_data):
            comments_list = issue_data.get("comments_list", [])
            bulk_issue_comments = bulk_issue_comments + [
                IssueComment(
                    issue=issue,
                    comment_html=comment.get("comment_html", "<p></p>"),
                    actor=request.user,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for comment in comments_list
            ]
        _ = IssueComment.objects.bulk_create(
            bulk_issue_comments, batch_size=100
        )
        # Attach Links
        _ = IssueLink.objects.bulk_create(
            [
                IssueLink(
                    issue=issue,
                    url=issue_data.get("link", {}).get(
                        "url", "https://github.com"
                    ),
                    title=issue_data.get("link", {}).get(
                        "title", "Original Issue"
                    ),
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for issue, issue_data in zip(issues, issues_data)
            ]
        )
        return Response(
            {"issues": IssueFlatSerializer(issues, many=True).data},
            status=status.HTTP_201_CREATED,
        )
class BulkImportModulesEndpoint(BaseAPIView):
    """Bulk-create modules (with links and module-issue relations) from an
    external importer payload.

    Expects ``request.data["modules_data"]`` to be a list of dicts carrying
    ``name``, ``description``, ``start_date``, ``target_date``, ``link`` and
    ``module_issues_list``. Links/issues are attached only when every module
    was actually created (no conflicts); otherwise responds 200 with a
    partial-success message.
    """

    def post(self, request, slug, project_id, service):
        modules_data = request.data.get("modules_data", [])
        project = Project.objects.get(pk=project_id, workspace__slug=slug)
        modules = Module.objects.bulk_create(
            [
                Module(
                    # Random hex name keeps unnamed modules unique.
                    name=module.get("name", uuid.uuid4().hex),
                    description=module.get("description", ""),
                    start_date=module.get("start_date", None),
                    target_date=module.get("target_date", None),
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for module in modules_data
            ],
            batch_size=100,
            ignore_conflicts=True,
        )
        # Re-fetch so we hold persisted rows (conflicted ones drop out).
        modules = Module.objects.filter(
            id__in=[module.id for module in modules]
        )
        # Proceed only if every payload entry produced a module; the zips
        # below pair modules and payloads by position.
        if len(modules) == len(modules_data):
            _ = ModuleLink.objects.bulk_create(
                [
                    ModuleLink(
                        module=module,
                        url=module_data.get("link", {}).get(
                            "url", "https://plane.so"
                        ),
                        title=module_data.get("link", {}).get(
                            "title", "Original Issue"
                        ),
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                        created_by=request.user,
                    )
                    for module, module_data in zip(modules, modules_data)
                ],
                batch_size=100,
                ignore_conflicts=True,
            )
            bulk_module_issues = []
            for module, module_data in zip(modules, modules_data):
                module_issues_list = module_data.get("module_issues_list", [])
                bulk_module_issues = bulk_module_issues + [
                    ModuleIssue(
                        issue_id=issue,
                        module=module,
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                        created_by=request.user,
                    )
                    for issue in module_issues_list
                ]
            _ = ModuleIssue.objects.bulk_create(
                bulk_module_issues, batch_size=100, ignore_conflicts=True
            )
            serializer = ModuleSerializer(modules, many=True)
            return Response(
                {"modules": serializer.data}, status=status.HTTP_201_CREATED
            )
        else:
            return Response(
                {
                    "message": "Modules created but issues could not be imported"
                },
                status=status.HTTP_200_OK,
            )

View File

@@ -0,0 +1,9 @@
from .base import IntegrationViewSet, WorkspaceIntegrationViewSet
from .github import (
GithubRepositorySyncViewSet,
GithubIssueSyncViewSet,
BulkCreateGithubIssueSyncEndpoint,
GithubCommentSyncViewSet,
GithubRepositoriesEndpoint,
)
from .slack import SlackProjectSyncViewSet

View File

@@ -0,0 +1,181 @@
# Python imports
import uuid
# Django imports
from django.contrib.auth.hashers import make_password
# Third party imports
from rest_framework.response import Response
from rest_framework import status
# Module imports
from plane.app.views import BaseViewSet
from plane.db.models import (
Integration,
WorkspaceIntegration,
Workspace,
User,
WorkspaceMember,
APIToken,
)
from plane.app.serializers import (
IntegrationSerializer,
WorkspaceIntegrationSerializer,
)
from plane.utils.integrations.github import (
get_github_metadata,
delete_github_installation,
)
from plane.app.permissions import WorkSpaceAdminPermission
from plane.utils.integrations.slack import slack_oauth
class IntegrationViewSet(BaseViewSet):
    """CRUD for integration provider records (GitHub, Slack, ...).

    Verified integrations are platform-managed and immutable through this
    API: both update and delete are rejected with HTTP 400 for them.
    """

    serializer_class = IntegrationSerializer
    model = Integration

    def create(self, request):
        serializer = IntegrationSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def partial_update(self, request, pk):
        integration = Integration.objects.get(pk=pk)
        if integration.verified:
            return Response(
                {"error": "Verified integrations cannot be updated"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        serializer = IntegrationSerializer(
            integration, data=request.data, partial=True
        )
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def destroy(self, request, pk):
        integration = Integration.objects.get(pk=pk)
        if integration.verified:
            return Response(
                # Message fixed: this guard rejects deletion, not update
                # (was copy-pasted from partial_update).
                {"error": "Verified integrations cannot be deleted"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        integration.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
class WorkspaceIntegrationViewSet(BaseViewSet):
    """Install and uninstall integrations on a workspace.

    ``create`` resolves provider-specific metadata (GitHub installation or
    Slack OAuth), provisions a bot user with an API token, records the
    installation, and adds the bot as a workspace member.
    """

    serializer_class = WorkspaceIntegrationSerializer
    model = WorkspaceIntegration
    permission_classes = [
        WorkSpaceAdminPermission,
    ]

    def get_queryset(self):
        """Limit results to integrations installed on the workspace in the URL."""
        return (
            super()
            .get_queryset()
            .filter(workspace__slug=self.kwargs.get("slug"))
            .select_related("integration")
        )

    def create(self, request, slug, provider):
        """Install *provider* on the workspace and return the new record."""
        workspace = Workspace.objects.get(slug=slug)
        integration = Integration.objects.get(provider=provider)
        # Initialise both up-front: previously ``metadata`` was only assigned
        # inside the github/slack branches, so any other provider raised a
        # NameError when the record was created below.
        config = {}
        metadata = {}
        if provider == "github":
            installation_id = request.data.get("installation_id", None)
            if not installation_id:
                return Response(
                    {"error": "Installation ID is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            metadata = get_github_metadata(installation_id)
            config = {"installation_id": installation_id}
        if provider == "slack":
            code = request.data.get("code", False)
            if not code:
                return Response(
                    {"error": "Code is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            slack_response = slack_oauth(code=code)
            metadata = slack_response
            access_token = metadata.get("access_token", False)
            team_id = metadata.get("team", {}).get("id", False)
            if not metadata or not access_token or not team_id:
                return Response(
                    {
                        "error": "Slack could not be installed. Please try again later"
                    },
                    status=status.HTTP_400_BAD_REQUEST,
                )
            config = {"team_id": team_id, "access_token": access_token}
        # Create a bot user that acts on behalf of the integration.
        bot_user = User.objects.create(
            email=f"{uuid.uuid4().hex}@plane.so",
            username=uuid.uuid4().hex,
            password=make_password(uuid.uuid4().hex),
            is_password_autoset=True,
            is_bot=True,
            first_name=integration.title,
            avatar=(
                integration.avatar_url
                if integration.avatar_url is not None
                else ""
            ),
        )
        # Create an API Token for the bot user
        api_token = APIToken.objects.create(
            user=bot_user,
            user_type=1,  # bot user
            workspace=workspace,
        )
        workspace_integration = WorkspaceIntegration.objects.create(
            workspace=workspace,
            integration=integration,
            actor=bot_user,
            api_token=api_token,
            metadata=metadata,
            config=config,
        )
        # Add bot user as a member of workspace (role 20)
        _ = WorkspaceMember.objects.create(
            workspace=workspace_integration.workspace,
            member=bot_user,
            role=20,
        )
        return Response(
            WorkspaceIntegrationSerializer(workspace_integration).data,
            status=status.HTTP_201_CREATED,
        )

    def destroy(self, request, slug, pk):
        """Uninstall the integration; for GitHub also revoke the installation."""
        workspace_integration = WorkspaceIntegration.objects.get(
            pk=pk, workspace__slug=slug
        )
        if workspace_integration.integration.provider == "github":
            installation_id = workspace_integration.config.get(
                "installation_id", False
            )
            if installation_id:
                delete_github_installation(installation_id=installation_id)
        workspace_integration.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)

View File

@@ -0,0 +1,201 @@
# Third party imports
from rest_framework import status
from rest_framework.response import Response
# Module imports
from plane.app.views import BaseViewSet, BaseAPIView
from plane.db.models import (
GithubIssueSync,
GithubRepositorySync,
GithubRepository,
WorkspaceIntegration,
ProjectMember,
Label,
GithubCommentSync,
Project,
)
from plane.app.serializers import (
GithubIssueSyncSerializer,
GithubRepositorySyncSerializer,
GithubCommentSyncSerializer,
)
from plane.utils.integrations.github import get_github_repos
from plane.app.permissions import (
ProjectBasePermission,
ProjectEntityPermission,
)
class GithubRepositoriesEndpoint(BaseAPIView):
    """List GitHub repositories reachable through a workspace integration."""

    permission_classes = [
        ProjectBasePermission,
    ]

    def get(self, request, slug, workspace_integration_id):
        requested_page = request.GET.get("page", 1)
        integration_record = WorkspaceIntegration.objects.get(
            workspace__slug=slug, pk=workspace_integration_id
        )
        # Only GitHub installations expose repositories.
        if integration_record.integration.provider != "github":
            return Response(
                {"error": "Not a github integration"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        meta = integration_record.metadata
        tokens_url = meta["access_tokens_url"]
        repos_url = (
            meta["repositories_url"] + f"?per_page=100&page={requested_page}"
        )
        return Response(
            get_github_repos(tokens_url, repos_url),
            status=status.HTTP_200_OK,
        )
class GithubRepositorySyncViewSet(BaseViewSet):
    """Manage the sync link between a Plane project and a GitHub repository."""
    permission_classes = [
        ProjectBasePermission,
    ]
    serializer_class = GithubRepositorySyncSerializer
    model = GithubRepositorySync
    def perform_create(self, serializer):
        """Attach the project from the URL kwargs to the new sync record."""
        serializer.save(project_id=self.kwargs.get("project_id"))
    def get_queryset(self):
        """Restrict results to the workspace/project addressed in the URL."""
        return (
            super()
            .get_queryset()
            .filter(workspace__slug=self.kwargs.get("slug"))
            .filter(project_id=self.kwargs.get("project_id"))
        )
    def create(self, request, slug, project_id, workspace_integration_id):
        """Replace the project's repository sync with a fresh one.

        Deletes any existing repository/sync rows for the project, creates
        the repository record, ensures a "GitHub" label exists, creates the
        sync row, and registers the integration bot as a project member
        (role 20). Returns 400 when required fields are missing, else 201
        with the serialized sync.
        """
        name = request.data.get("name", False)
        url = request.data.get("url", False)
        config = request.data.get("config", {})
        repository_id = request.data.get("repository_id", False)
        owner = request.data.get("owner", False)
        if not name or not url or not repository_id or not owner:
            return Response(
                {"error": "Name, url, repository_id and owner are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        # Get the workspace integration
        workspace_integration = WorkspaceIntegration.objects.get(
            pk=workspace_integration_id
        )
        # Delete the old repository object — a project has at most one
        # repository sync, so re-linking replaces the previous pair.
        GithubRepositorySync.objects.filter(
            project_id=project_id, workspace__slug=slug
        ).delete()
        GithubRepository.objects.filter(
            project_id=project_id, workspace__slug=slug
        ).delete()
        # Create repository
        repo = GithubRepository.objects.create(
            name=name,
            url=url,
            config=config,
            repository_id=repository_id,
            owner=owner,
            project_id=project_id,
        )
        # Create a Label for github (reused if it already exists)
        label = Label.objects.filter(
            name="GitHub",
            project_id=project_id,
        ).first()
        if label is None:
            label = Label.objects.create(
                name="GitHub",
                project_id=project_id,
                description="Label to sync Plane issues with GitHub issues",
                color="#003773",
            )
        # Create repo sync
        repo_sync = GithubRepositorySync.objects.create(
            repository=repo,
            workspace_integration=workspace_integration,
            actor=workspace_integration.actor,
            credentials=request.data.get("credentials", {}),
            project_id=project_id,
            label=label,
        )
        # Add bot as a member in the project
        _ = ProjectMember.objects.get_or_create(
            member=workspace_integration.actor, role=20, project_id=project_id
        )
        # Return Response
        return Response(
            GithubRepositorySyncSerializer(repo_sync).data,
            status=status.HTTP_201_CREATED,
        )
class GithubIssueSyncViewSet(BaseViewSet):
    """Viewset scoping GitHub issue-sync records to a repository sync."""

    permission_classes = [
        ProjectEntityPermission,
    ]
    serializer_class = GithubIssueSyncSerializer
    model = GithubIssueSync

    def perform_create(self, serializer):
        # Bind the new record to the project and repository sync from the URL.
        route_kwargs = self.kwargs
        serializer.save(
            project_id=route_kwargs.get("project_id"),
            repository_sync_id=route_kwargs.get("repo_sync_id"),
        )
class BulkCreateGithubIssueSyncEndpoint(BaseAPIView):
    """Bulk-create GitHub issue-sync rows for a repository sync."""

    def post(self, request, slug, project_id, repo_sync_id):
        project = Project.objects.get(pk=project_id, workspace__slug=slug)
        payload = request.data.get("github_issue_syncs", [])
        rows = [
            GithubIssueSync(
                issue_id=item.get("issue"),
                repo_issue_id=item.get("repo_issue_id"),
                issue_url=item.get("issue_url"),
                github_issue_id=item.get("github_issue_id"),
                repository_sync_id=repo_sync_id,
                project_id=project_id,
                workspace_id=project.workspace_id,
                created_by=request.user,
                updated_by=request.user,
            )
            for item in payload
        ]
        # ignore_conflicts keeps re-imports of the same issues idempotent.
        created = GithubIssueSync.objects.bulk_create(
            rows,
            batch_size=100,
            ignore_conflicts=True,
        )
        serializer = GithubIssueSyncSerializer(created, many=True)
        return Response(serializer.data, status=status.HTTP_201_CREATED)
class GithubCommentSyncViewSet(BaseViewSet):
    """Viewset scoping GitHub comment-sync records to an issue sync."""

    permission_classes = [
        ProjectEntityPermission,
    ]
    serializer_class = GithubCommentSyncSerializer
    model = GithubCommentSync

    def perform_create(self, serializer):
        # Bind the new record to the project and issue sync from the URL.
        route_kwargs = self.kwargs
        serializer.save(
            project_id=route_kwargs.get("project_id"),
            issue_sync_id=route_kwargs.get("issue_sync_id"),
        )

View File

@@ -0,0 +1,95 @@
# Django import
from django.db import IntegrityError
# Third party imports
from rest_framework import status
from rest_framework.response import Response
from sentry_sdk import capture_exception
# Module imports
from plane.app.views import BaseViewSet
from plane.db.models import (
SlackProjectSync,
WorkspaceIntegration,
ProjectMember,
)
from plane.app.serializers import SlackProjectSyncSerializer
from plane.app.permissions import (
ProjectBasePermission,
)
from plane.utils.integrations.slack import slack_oauth
class SlackProjectSyncViewSet(BaseViewSet):
    """Connect a Slack workspace/channel to a project via OAuth."""

    permission_classes = [
        ProjectBasePermission,
    ]
    serializer_class = SlackProjectSyncSerializer
    model = SlackProjectSync

    def get_queryset(self):
        """Limit to syncs in the addressed workspace/project visible to the member."""
        return (
            super()
            .get_queryset()
            .filter(
                workspace__slug=self.kwargs.get("slug"),
                project_id=self.kwargs.get("project_id"),
            )
            .filter(
                project__project_projectmember__member=self.request.user,
                project__project_projectmember__is_active=True,
            )
        )

    def create(self, request, slug, project_id, workspace_integration_id):
        """Exchange the OAuth ``code``, persist the Slack sync, and add the bot.

        Returns 410 when a sync already exists for the project and 400 on
        other installation failures.
        """
        try:
            code = request.data.get("code", False)
            if not code:
                return Response(
                    {"error": "Code is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            slack_response = slack_oauth(code=code)
            # Fetch the integration once; the original issued this query
            # twice back-to-back (merge-conflict residue).
            workspace_integration = WorkspaceIntegration.objects.get(
                pk=workspace_integration_id, workspace__slug=slug
            )
            slack_project_sync = SlackProjectSync.objects.create(
                access_token=slack_response.get("access_token"),
                scopes=slack_response.get("scope"),
                bot_user_id=slack_response.get("bot_user_id"),
                webhook_url=slack_response.get("incoming_webhook", {}).get(
                    "url"
                ),
                data=slack_response,
                team_id=slack_response.get("team", {}).get("id"),
                team_name=slack_response.get("team", {}).get("name"),
                workspace_integration=workspace_integration,
                project_id=project_id,
            )
            # Make sure the integration bot is a project member (role 20).
            _ = ProjectMember.objects.get_or_create(
                member=workspace_integration.actor,
                role=20,
                project_id=project_id,
            )
            serializer = SlackProjectSyncSerializer(slack_project_sync)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except IntegrityError as e:
            if "already exists" in str(e):
                return Response(
                    {"error": "Slack is already installed for the project"},
                    status=status.HTTP_410_GONE,
                )
            capture_exception(e)
            return Response(
                {
                    "error": "Slack could not be installed. Please try again later"
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

View File

@@ -29,6 +29,8 @@ from plane.db.models import (
IssueComment,
CommentReaction,
)
from plane.payment.flags.flag_decorator import check_workspace_feature_flag
from plane.payment.flags.flag import FeatureFlag
class IssueActivityEndpoint(BaseAPIView):
@@ -61,6 +63,13 @@ class IssueActivityEndpoint(BaseAPIView):
.filter(**filters)
.select_related("actor", "workspace", "issue", "project")
).order_by("created_at")
if not check_workspace_feature_flag(
feature_key=FeatureFlag.ISSUE_TYPE_DISPLAY,
slug=slug,
):
issue_activities = issue_activities.filter(~Q(field="type"))
issue_comments = (
IssueComment.objects.filter(issue_id=issue_id)
.filter(

View File

@@ -12,12 +12,11 @@ from django.views.decorators.gzip import gzip_page
from rest_framework import status
from rest_framework.response import Response
from plane.app.permissions import (
ProjectEntityPermission,
)
# Module imports
from .. import BaseViewSet
from plane.app.serializers import (
IssueFlatSerializer,
IssueSerializer,
IssueFlatSerializer,
IssueDetailSerializer,
)
from plane.bgtasks.issue_activities_task import issue_activity
@@ -42,9 +41,6 @@ from plane.utils.paginator import (
from plane.app.permissions import allow_permission, ROLE
from plane.utils.error_codes import ERROR_CODES
# Module imports
from .. import BaseViewSet, BaseAPIView
class IssueArchiveViewSet(BaseViewSet):
serializer_class = IssueFlatSerializer
@@ -273,7 +269,10 @@ class IssueArchiveViewSet(BaseViewSet):
if issue.state.group not in ["completed", "cancelled"]:
return Response(
{
"error": "Can only archive completed or cancelled state group issue"
"error_code": ERROR_CODES[
"INVALID_ARCHIVE_STATE_GROUP"
],
"error_message": "INVALID_ARCHIVE_STATE_GROUP",
},
status=status.HTTP_400_BAD_REQUEST,
)
@@ -327,61 +326,3 @@ class IssueArchiveViewSet(BaseViewSet):
issue.save()
return Response(status=status.HTTP_204_NO_CONTENT)
class BulkArchiveIssuesEndpoint(BaseAPIView):
permission_classes = [
ProjectEntityPermission,
]
@allow_permission([ROLE.ADMIN, ROLE.MEMBER])
def post(self, request, slug, project_id):
issue_ids = request.data.get("issue_ids", [])
if not len(issue_ids):
return Response(
{"error": "Issue IDs are required"},
status=status.HTTP_400_BAD_REQUEST,
)
issues = Issue.objects.filter(
workspace__slug=slug, project_id=project_id, pk__in=issue_ids
).select_related("state")
bulk_archive_issues = []
for issue in issues:
if issue.state.group not in ["completed", "cancelled"]:
return Response(
{
"error_code": ERROR_CODES[
"INVALID_ARCHIVE_STATE_GROUP"
],
"error_message": "INVALID_ARCHIVE_STATE_GROUP",
},
status=status.HTTP_400_BAD_REQUEST,
)
issue_activity.delay(
type="issue.activity.updated",
requested_data=json.dumps(
{
"archived_at": str(timezone.now().date()),
"automation": False,
}
),
actor_id=str(request.user.id),
issue_id=str(issue.id),
project_id=str(project_id),
current_instance=json.dumps(
IssueSerializer(issue).data, cls=DjangoJSONEncoder
),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
)
issue.archived_at = timezone.now().date()
bulk_archive_issues.append(issue)
Issue.objects.bulk_update(bulk_archive_issues, ["archived_at"])
return Response(
{"archived_at": str(timezone.now().date())},
status=status.HTTP_200_OK,
)

View File

@@ -21,6 +21,8 @@ from plane.bgtasks.issue_activities_task import issue_activity
from plane.app.permissions import allow_permission, ROLE
from plane.settings.storage import S3Storage
from plane.bgtasks.storage_metadata_task import get_asset_object_metadata
from plane.payment.flags.flag_decorator import check_workspace_feature_flag
from plane.payment.flags.flag import FeatureFlag
class IssueAttachmentEndpoint(BaseAPIView):
@@ -72,7 +74,6 @@ class IssueAttachmentEndpoint(BaseAPIView):
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
)
return Response(status=status.HTTP_204_NO_CONTENT)
@allow_permission(
@@ -91,7 +92,6 @@ class IssueAttachmentEndpoint(BaseAPIView):
class IssueAttachmentV2Endpoint(BaseAPIView):
serializer_class = IssueAttachmentSerializer
model = FileAsset
@@ -99,7 +99,27 @@ class IssueAttachmentV2Endpoint(BaseAPIView):
def post(self, request, slug, project_id, issue_id):
name = request.data.get("name")
type = request.data.get("type", False)
size = int(request.data.get("size", settings.FILE_SIZE_LIMIT))
size = request.data.get("size")
# Check if the request is valid
if not name or not size:
return Response(
{
"error": "Invalid request.",
"status": False,
},
status=status.HTTP_400_BAD_REQUEST,
)
# Check if the file size is greater than the limit
if check_workspace_feature_flag(
feature_key=FeatureFlag.FILE_SIZE_LIMIT_PRO,
slug=slug,
user_id=str(request.user.id),
):
size_limit = min(size, settings.PRO_FILE_SIZE_LIMIT)
else:
size_limit = min(size, settings.FILE_SIZE_LIMIT)
if not type or type not in settings.ATTACHMENT_MIME_TYPES:
return Response(
@@ -116,9 +136,6 @@ class IssueAttachmentV2Endpoint(BaseAPIView):
# asset key
asset_key = f"{workspace.id}/{uuid.uuid4().hex}-{name}"
# Get the size limit
size_limit = min(size, settings.FILE_SIZE_LIMIT)
# Create a File Asset
asset = FileAsset.objects.create(
attributes={

View File

@@ -44,7 +44,9 @@ from plane.db.models import (
IssueSubscriber,
Project,
ProjectMember,
Cycle,
)
from plane.ee.models import EntityIssueStateActivity
from plane.utils.grouper import (
issue_group_values,
issue_on_results,
@@ -180,6 +182,7 @@ class IssueListEndpoint(BaseAPIView):
"is_draft",
"archived_at",
"deleted_at",
"type_id",
)
datetime_fields = ["created_at", "updated_at"]
issues = user_timezone_converter(
@@ -449,6 +452,7 @@ class IssueViewSet(BaseViewSet):
"is_draft",
"archived_at",
"deleted_at",
"type_id",
)
.first()
)
@@ -630,6 +634,12 @@ class IssueViewSet(BaseViewSet):
current_instance = json.dumps(
IssueSerializer(issue).data, cls=DjangoJSONEncoder
)
estimate_type = Project.objects.filter(
workspace__slug=slug,
pk=project_id,
estimate__isnull=False,
estimate__type="points",
).exists()
requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
serializer = IssueCreateSerializer(
@@ -648,6 +658,31 @@ class IssueViewSet(BaseViewSet):
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
)
if issue.cycle_id and (
request.data.get("state_id")
or request.data.get("estimate_point")
):
cycle = Cycle.objects.get(pk=issue.cycle_id)
if cycle.version == 2:
EntityIssueStateActivity.objects.create(
cycle_id=issue.cycle_id,
state_id=issue.state_id,
issue_id=issue.id,
state_group=issue.state.group,
action="UPDATED",
entity_type="CYCLE",
estimate_point_id=issue.estimate_point_id,
estimate_value=(
issue.estimate_point.value
if estimate_type and issue.estimate_point
else None
),
workspace_id=issue.workspace_id,
created_by_id=request.user.id,
updated_by_id=request.user.id,
)
model_activity.delay(
model_name="issue",
model_id=str(serializer.data.get("id", None)),
@@ -854,6 +889,7 @@ class IssuePaginatedViewSet(BaseViewSet):
"link_count",
"attachment_count",
"sub_issues_count",
"type_id",
]
if str(is_description_required).lower() == "true":

View File

@@ -165,6 +165,7 @@ class IssueRelationViewSet(BaseViewSet):
"created_by",
"updated_by",
"relation_type",
"type_id",
]
response_data = {

View File

@@ -149,6 +149,7 @@ class SubIssuesEndpoint(BaseAPIView):
"link_count",
"is_draft",
"archived_at",
"type_id",
)
datetime_fields = ["created_at", "updated_at"]
sub_issues = user_timezone_converter(

View File

@@ -123,3 +123,4 @@ class IssueSubscriberViewSet(BaseViewSet):
return Response(
{"subscribed": issue_subscriber}, status=status.HTTP_200_OK
)

View File

@@ -33,6 +33,7 @@ from plane.db.models import (
ProjectMember,
ProjectPage,
Project,
DeployBoard,
)
from plane.utils.error_codes import ERROR_CODES
from ..base import BaseAPIView, BaseViewSet
@@ -116,6 +117,13 @@ class PageViewSet(BaseViewSet):
),
)
.filter(project=True)
.annotate(
anchor=DeployBoard.objects.filter(
entity_name="page",
entity_identifier=OuterRef("pk"),
workspace__slug=self.kwargs.get("slug"),
).values("anchor")
)
.distinct()
)
@@ -434,6 +442,12 @@ class PageViewSet(BaseViewSet):
entity_identifier=pk,
entity_type="page",
).delete()
# Delete the deploy board
DeployBoard.objects.filter(
entity_name="page",
entity_identifier=pk,
workspace__slug=slug,
).delete()
return Response(status=status.HTTP_204_NO_CONTENT)

View File

@@ -55,6 +55,16 @@ from plane.bgtasks.webhook_task import model_activity
from plane.bgtasks.recent_visited_task import recent_visited_task
from plane.utils.exception_logger import log_exception
# EE imports
from plane.ee.models import ProjectState, ProjectAttribute
from plane.ee.utils.workspace_feature import (
WorkspaceFeatureContext,
check_workspace_feature,
)
from plane.ee.serializers.app.project import ProjectAttributeSerializer
from plane.payment.flags.flag_decorator import check_workspace_feature_flag
from plane.payment.flags.flag import FeatureFlag
class ProjectViewSet(BaseViewSet):
serializer_class = ProjectListSerializer
@@ -68,6 +78,14 @@ class ProjectViewSet(BaseViewSet):
workspace__slug=self.kwargs.get("slug"),
is_active=True,
).values("sort_order")
# EE: project_grouping starts
state_id = ProjectAttribute.objects.filter(
workspace__slug=self.kwargs.get("slug"),
project_id=OuterRef("pk"),
).values("state_id")
# EE: project_grouping ends
return self.filter_queryset(
super()
.get_queryset()
@@ -135,6 +153,27 @@ class ProjectViewSet(BaseViewSet):
).values("anchor")
)
.annotate(sort_order=Subquery(sort_order))
# EE: project_grouping starts
.annotate(state_id=Subquery(state_id))
.annotate(
priority=ProjectAttribute.objects.filter(
workspace__slug=self.kwargs.get("slug"),
project_id=OuterRef("pk"),
).values("priority")
)
.annotate(
start_date=ProjectAttribute.objects.filter(
workspace__slug=self.kwargs.get("slug"),
project_id=OuterRef("pk"),
).values("start_date")
)
.annotate(
target_date=ProjectAttribute.objects.filter(
workspace__slug=self.kwargs.get("slug"),
project_id=OuterRef("pk"),
).values("target_date")
)
# EE: project_grouping ends
.prefetch_related(
Prefetch(
"project_projectmember",
@@ -372,6 +411,42 @@ class ProjectViewSet(BaseViewSet):
]
)
# validating the PROJECT_GROUPING feature flag is enabled
if check_workspace_feature_flag(
feature_key=FeatureFlag.PROJECT_GROUPING,
slug=slug,
user_id=str(request.user.id),
default_value=False,
):
# validating the is_project_grouping_enabled workspace feature is enabled
if check_workspace_feature(
slug,
WorkspaceFeatureContext.IS_PROJECT_GROUPING_ENABLED,
):
state_id = request.data.get("state_id", None)
priority = request.data.get("priority", "none")
start_date = request.data.get("start_date", None)
target_date = request.data.get("target_date", None)
if state_id is None:
state_id = (
ProjectState.objects.filter(
workspace=workspace, default=True
)
.values_list("id", flat=True)
.first()
)
# also create project attributes
_ = ProjectAttribute.objects.create(
project_id=serializer.data.get("id"),
state_id=state_id,
priority=priority,
start_date=start_date,
target_date=target_date,
workspace_id=workspace.id,
)
project = (
self.get_queryset()
.filter(pk=serializer.data["id"])
@@ -471,6 +546,34 @@ class ProjectViewSet(BaseViewSet):
is_triage=True,
)
# EE: project_grouping starts
# validating the PROJECT_GROUPING feature flag is enabled
if check_workspace_feature_flag(
feature_key=FeatureFlag.PROJECT_GROUPING,
slug=slug,
user_id=str(request.user.id),
default_value=False,
):
# validating the is_project_grouping_enabled workspace feature is enabled
if check_workspace_feature(
slug,
WorkspaceFeatureContext.IS_PROJECT_GROUPING_ENABLED,
):
project_attribute = ProjectAttribute.objects.filter(
project_id=project.id
).first()
if project_attribute is not None:
project_attribute_serializer = (
ProjectAttributeSerializer(
project_attribute,
data=request.data,
partial=True,
)
)
if project_attribute_serializer.is_valid():
project_attribute_serializer.save()
# EE: project_grouping ends
project = (
self.get_queryset()
.filter(pk=serializer.data["id"])
@@ -549,15 +652,14 @@ class ProjectViewSet(BaseViewSet):
class ProjectArchiveUnarchiveEndpoint(BaseAPIView):
@allow_permission([ROLE.ADMIN, ROLE.MEMBER])
def post(self, request, slug, project_id):
project = Project.objects.get(pk=project_id, workspace__slug=slug)
project.archived_at = timezone.now()
project.save()
UserFavorite.objects.filter(
project_id=project_id,
workspace__slug=slug,
project=project_id,
).delete()
return Response(
{"archived_at": str(project.archived_at)},
@@ -702,41 +804,29 @@ class ProjectPublicCoverImagesEndpoint(BaseAPIView):
# Cache the below api for 24 hours
@cache_response(60 * 60 * 24, user=False)
def get(self, request):
files = []
if settings.USE_MINIO:
s3 = boto3.client(
"s3",
endpoint_url=settings.AWS_S3_ENDPOINT_URL,
aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
)
else:
s3 = boto3.client(
"s3",
aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
)
params = {
"Bucket": settings.AWS_STORAGE_BUCKET_NAME,
"Prefix": "static/project-cover/",
}
try:
response = s3.list_objects_v2(**params)
# Extracting file keys from the response
if "Contents" in response:
for content in response["Contents"]:
if not content["Key"].endswith(
"/"
): # This line ensures we're only getting files, not "sub-folders"
files.append(
f"https://{settings.AWS_STORAGE_BUCKET_NAME}.s3.{settings.AWS_REGION}.amazonaws.com/{content['Key']}"
)
return Response(files, status=status.HTTP_200_OK)
except Exception as e:
log_exception(e)
return Response([], status=status.HTTP_200_OK)
files = [
"https://cover-images.plane.so/project-covers/f2ea49f1-1a23-46c3-99e4-1f6185bff8fc.webp",
"https://cover-images.plane.so/project-covers/0fec1f5e-3a54-4260-beb1-25eb5de8fd87.webp",
"https://cover-images.plane.so/project-covers/05a7e2d0-c846-44df-abc2-99e14043dfb9.webp",
"https://cover-images.plane.so/project-covers/8c561535-6be5-4fb8-8ec1-0cba19507938.webp",
"https://cover-images.plane.so/project-covers/11cde8b7-f051-4a9d-a35e-45b475d757a2.webp",
"https://cover-images.plane.so/project-covers/27b12e3a-5e24-4ea9-b5ac-32caaf81a1c3.webp",
"https://cover-images.plane.so/project-covers/32d808af-650a-4228-9386-253d1a7c2a13.webp",
"https://cover-images.plane.so/project-covers/71dbaf8f-fd3c-4f9a-b342-309cf4f22741.webp",
"https://cover-images.plane.so/project-covers/322a58cb-e019-4477-b3eb-e2679d4a2b47.webp",
"https://cover-images.plane.so/project-covers/061042d0-cf7b-42eb-8fb5-e967b07e9e57.webp",
"https://cover-images.plane.so/project-covers/683b5357-b5f1-42c7-9a87-e7ff6be0eea1.webp",
"https://cover-images.plane.so/project-covers/51495ec3-266f-41e8-9360-589903fd4f56.webp",
"https://cover-images.plane.so/project-covers/1031078f-28d7-496f-b92b-dec3ea83519d.webp",
"https://cover-images.plane.so/project-covers/a65e3aed-4a88-4ecf-a9f7-b74d0e4a1f03.webp",
"https://cover-images.plane.so/project-covers/ab31a6ba-51e2-44ad-a00d-e431b4cf865f.webp",
"https://cover-images.plane.so/project-covers/adb8a78f-da02-4b68-82ca-fa34ce40768b.webp",
"https://cover-images.plane.so/project-covers/c29d7097-12dc-4ae0-a785-582e2ceadc29.webp",
"https://cover-images.plane.so/project-covers/d7a7e86d-fe5b-4256-8625-d1c6a39cdde9.webp",
"https://cover-images.plane.so/project-covers/d27444ac-b76e-4c8f-b272-6a6b00865869.webp",
"https://cover-images.plane.so/project-covers/e7fb2595-987e-4f0c-b251-62d071f501fa.webp",
]
return Response(files, status=status.HTTP_200_OK)
class DeployBoardViewSet(BaseViewSet):

View File

@@ -27,6 +27,7 @@ from plane.db.models import (
WorkspaceMember,
IssueUserProperty,
)
from plane.payment.bgtasks.member_sync_task import member_sync_task
class ProjectInvitationsViewset(BaseViewSet):
@@ -241,6 +242,9 @@ class ProjectJoinEndpoint(BaseAPIView):
workspace_member.is_active = True
workspace_member.save()
# Sync workspace members
member_sync_task.delay(slug)
# Check if the user was already a member of project then activate the user
project_member = ProjectMember.objects.filter(
workspace_id=project_invite.workspace_id, member=user

View File

@@ -22,6 +22,7 @@ from plane.db.models import (
Page,
IssueView,
ProjectPage,
ProjectMember,
)
@@ -90,8 +91,10 @@ class GlobalSearchEndpoint(BaseAPIView):
"project__identifier",
"project_id",
"workspace__slug",
"type_id",
)[:100]
def filter_cycles(self, query, slug, project_id, workspace_search):
fields = ["name"]
q = Q()
@@ -260,3 +263,202 @@ class GlobalSearchEndpoint(BaseAPIView):
func = MODELS_MAPPER.get(model, None)
results[model] = func(query, slug, project_id, workspace_search)
return Response({"results": results}, status=status.HTTP_200_OK)
class SearchEndpoint(BaseAPIView):
    """Typed quick-search within a project.

    ``query_type`` picks the entity kind to search (``mention``,
    ``project``, ``issue``, ``cycle``, ``module`` or ``page``) and
    ``count`` caps how many rows are returned per entity. Unknown query
    types get a 400 response.
    """

    def _text_query(self, fields, query):
        """Build an OR'd case-insensitive containment filter over *fields*.

        Returns an empty ``Q`` (matches everything) when *query* is falsy,
        mirroring the previous inline behaviour of every branch.
        """
        q = Q()
        if query:
            for field in fields:
                q |= Q(**{f"{field}__icontains": query})
        return q

    def get(self, request, slug, project_id):
        query = request.query_params.get("query", False)
        query_type = request.query_params.get("query_type", "issue")
        count = int(request.query_params.get("count", 5))
        if query_type == "mention":
            # Mentions combine matching project members and public pages.
            q = self._text_query(
                ["member__first_name", "member__last_name"], query
            )
            users = (
                ProjectMember.objects.filter(
                    q,
                    project__project_projectmember__member=self.request.user,
                    project__project_projectmember__is_active=True,
                    project_id=project_id,
                    workspace__slug=slug,
                )
                .order_by("-created_at")
                .values(
                    "member__first_name",
                    "member__last_name",
                    "member__avatar",
                    "member__display_name",
                    "member__id",
                )[:count]
            )
            q = self._text_query(["name"], query)
            pages = (
                Page.objects.filter(
                    q,
                    project__project_projectmember__member=self.request.user,
                    project__project_projectmember__is_active=True,
                    workspace__slug=slug,
                    access=0,
                )
                .order_by("-created_at")
                .values("name", "id")[:count]
            )
            return Response(
                {"users": users, "pages": pages}, status=status.HTTP_200_OK
            )
        if query_type == "project":
            q = self._text_query(["name", "identifier"], query)
            projects = (
                Project.objects.filter(
                    q,
                    Q(project_projectmember__member=self.request.user)
                    | Q(network=2),
                    workspace__slug=slug,
                )
                .order_by("-created_at")
                .distinct()
                .values("name", "id", "identifier", "workspace__slug")[:count]
            )
            return Response(projects, status=status.HTTP_200_OK)
        if query_type == "issue":
            # Issues additionally match whole sequence numbers in the query,
            # so this branch keeps its bespoke Q construction.
            fields = ["name", "sequence_id", "project__identifier"]
            q = Q()
            if query:
                for field in fields:
                    if field == "sequence_id":
                        # Match whole integers only (exclude decimal numbers)
                        sequences = re.findall(r"\b\d+\b", query)
                        for sequence_id in sequences:
                            q |= Q(**{"sequence_id": sequence_id})
                    else:
                        q |= Q(**{f"{field}__icontains": query})
            issues = (
                Issue.issue_objects.filter(
                    q,
                    project__project_projectmember__member=self.request.user,
                    project__project_projectmember__is_active=True,
                    workspace__slug=slug,
                    project_id=project_id,
                )
                .order_by("-created_at")
                .distinct()
                .values(
                    "name",
                    "id",
                    "sequence_id",
                    "project__identifier",
                    "project_id",
                    "priority",
                    "state_id",
                    "type_id",
                )[:count]
            )
            return Response(issues, status=status.HTTP_200_OK)
        if query_type == "cycle":
            q = self._text_query(["name"], query)
            cycles = (
                Cycle.objects.filter(
                    q,
                    project__project_projectmember__member=self.request.user,
                    project__project_projectmember__is_active=True,
                    workspace__slug=slug,
                )
                .order_by("-created_at")
                .distinct()
                .values(
                    "name",
                    "id",
                    "project_id",
                    "project__identifier",
                    "workspace__slug",
                )[:count]
            )
            return Response(cycles, status=status.HTTP_200_OK)
        if query_type == "module":
            q = self._text_query(["name"], query)
            modules = (
                Module.objects.filter(
                    q,
                    project__project_projectmember__member=self.request.user,
                    project__project_projectmember__is_active=True,
                    workspace__slug=slug,
                )
                .order_by("-created_at")
                .distinct()
                .values(
                    "name",
                    "id",
                    "project_id",
                    "project__identifier",
                    "workspace__slug",
                )[:count]
            )
            return Response(modules, status=status.HTTP_200_OK)
        if query_type == "page":
            q = self._text_query(["name"], query)
            pages = (
                Page.objects.filter(
                    q,
                    projects__project_projectmember__member=self.request.user,
                    projects__project_projectmember__is_active=True,
                    projects__id=project_id,
                    workspace__slug=slug,
                    access=0,
                )
                .order_by("-created_at")
                .distinct()
                .values(
                    "name",
                    "id",
                    "projects__id",
                    # NOTE(review): "project__identifier" looks inconsistent
                    # with the "projects" M2M used in the filter above —
                    # verify the lookup name against the Page model.
                    "project__identifier",
                    "workspace__slug",
                )[:count]
            )
            return Response(pages, status=status.HTTP_200_OK)
        return Response(
            {"error": "Please provide a valid query"},
            status=status.HTTP_400_BAD_REQUEST,
        )

View File

@@ -1,5 +1,3 @@
# Python imports
# Django imports
from django.db.models import Q
@@ -97,6 +95,7 @@ class IssueSearchEndpoint(BaseAPIView):
"state__name",
"state__group",
"state__color",
"type_id",
)[:100],
status=status.HTTP_200_OK,
)

View File

@@ -0,0 +1,139 @@
# Python imports
import re
# Django imports
from django.db.models import Q
# Third party imports
from rest_framework import status
from rest_framework.response import Response
# Module imports
from plane.app.views import BaseAPIView
from plane.db.models import Workspace, Page, Issue
from plane.app.permissions import WorkspaceEntityPermission
class WorkspaceSearchEndpoint(BaseAPIView):
    """Search workspace-level entities (workspaces and pages) matching a
    free-text query and return the hits grouped by entity type.
    """

    permission_classes = [
        WorkspaceEntityPermission,
    ]

    def filter_workspaces(self, query, slug):
        """Return workspaces the requesting user is a member of whose
        name matches *query* (case-insensitive). *slug* is accepted for
        a uniform filter signature but is not used by this filter."""
        name_filter = Q(name__icontains=query)
        workspaces = Workspace.objects.filter(
            name_filter, workspace_member__member=self.request.user
        )
        return workspaces.distinct().values("name", "id", "slug")

    def filter_pages(self, query, slug):
        """Return global, non-archived pages of workspace *slug* whose
        name matches *query* and that the user may see (pages the user
        owns, or public pages — ``access == 0``)."""
        name_filter = Q(name__icontains=query)
        visibility_filter = Q(owned_by=self.request.user) | Q(access=0)
        pages = Page.objects.filter(
            name_filter,
            workspace__slug=slug,
            archived_at__isnull=True,
            is_global=True,
        ).filter(visibility_filter)
        return pages.distinct().values("name", "id", "workspace__slug")

    def get(self, request, slug):
        """Run every registered entity filter for the search term and
        return a mapping of entity type -> matching rows."""
        search_term = request.GET.get("search", False)
        if not search_term:
            return Response(
                {"error": "Search query is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        filters_by_entity = {
            "workspace": self.filter_workspaces,
            "page": self.filter_pages,
        }
        results = {
            entity: entity_filter(search_term, slug)
            for entity, entity_filter in filters_by_entity.items()
        }
        return Response({"results": results}, status=status.HTTP_200_OK)
class WorkspaceEntitySearchEndpoint(BaseAPIView):
    """Search workspace entities (currently only issues) by a free-text
    query, returning at most ``count`` rows per entity type.
    """

    permission_classes = [
        WorkspaceEntityPermission,
    ]

    def filter_issues(self, slug, query, count):
        """Filter issues based on the query.

        The query is matched case-insensitively against the issue name
        and the project identifier; any whole integers found in the
        query are additionally matched against the issue sequence id.
        Only issues in projects where the requesting user is an active
        member are returned, newest first, limited to *count* rows.
        """
        fields = ["name", "sequence_id", "project__identifier"]
        q = Q()
        if query:
            for field in fields:
                if field == "sequence_id":
                    # Match whole integers only (exclude decimal numbers)
                    sequences = re.findall(r"\b\d+\b", query)
                    for sequence_id in sequences:
                        q |= Q(**{"sequence_id": sequence_id})
                else:
                    q |= Q(**{f"{field}__icontains": query})
        issues = (
            Issue.issue_objects.filter(
                q,
                project__project_projectmember__member=self.request.user,
                project__project_projectmember__is_active=True,
                workspace__slug=slug,
            )
            .order_by("-created_at")
            .distinct()
            .values(
                "name",
                "id",
                "sequence_id",
                "project__identifier",
                "project_id",
                "priority",
                "state_id",
                "type_id",
            )[:count]
        )
        return issues

    def get(self, request, slug):
        query = request.query_params.get("query", False)
        query_type = request.query_params.get("query_type", "issue")
        # Guard the count conversion: a non-numeric value previously
        # raised an unhandled ValueError (HTTP 500).
        try:
            count = int(request.query_params.get("count", 5))
        except (TypeError, ValueError):
            return Response(
                {"error": "count must be an integer"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        MODELS_MAPPER = {
            "issue": self.filter_issues,
        }
        func = MODELS_MAPPER.get(query_type, None)
        # An unrecognised query_type previously resulted in calling
        # None (HTTP 500); reject it explicitly instead.
        if func is None:
            return Response(
                {"error": "Please provide a valid query_type"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        results = func(slug, query, count)
        return Response(results, status=status.HTTP_200_OK)

View File

@@ -40,6 +40,8 @@ from plane.utils.host import base_host
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_control
from django.views.decorators.vary import vary_on_cookie
from plane.payment.bgtasks.member_sync_task import member_sync_task
class UserEndpoint(BaseViewSet):
@@ -171,6 +173,12 @@ class UserEndpoint(BaseViewSet):
workspaces_to_deactivate, ["is_active"], batch_size=100
)
# Sync workspace members
[
member_sync_task.delay(workspace.workspace.slug)
for workspace in workspaces_to_deactivate
]
# Delete all workspace invites
WorkspaceMemberInvite.objects.filter(
email=user.email,

View File

@@ -38,6 +38,7 @@ from plane.db.models import (
WorkspaceMember,
ProjectMember,
Project,
DeployBoard,
)
from plane.utils.grouper import (
issue_group_values,
@@ -450,6 +451,14 @@ class IssueViewViewSet(BaseViewSet):
.select_related("project")
.select_related("workspace")
.annotate(is_favorite=Exists(subquery))
.annotate(
anchor=DeployBoard.objects.filter(
entity_name="view",
entity_identifier=OuterRef("pk"),
project_id=self.kwargs.get("project_id"),
workspace__slug=self.kwargs.get("slug"),
).values("anchor")
)
.order_by("-is_favorite", "name")
.distinct()
)
@@ -577,6 +586,13 @@ class IssueViewViewSet(BaseViewSet):
entity_identifier=pk,
entity_type="view",
).delete()
# Delete the view from the deploy board
DeployBoard.objects.filter(
entity_name="view",
entity_identifier=pk,
project_id=project_id,
workspace__slug=slug,
).delete()
else:
return Response(
{"error": "Only admin or owner can delete the view"},

View File

@@ -2,8 +2,10 @@
import csv
import io
from datetime import date
import requests
from dateutil.relativedelta import relativedelta
# Django imports
from django.db import IntegrityError
from django.db.models import (
Count,
@@ -15,8 +17,6 @@ from django.db.models import (
)
from django.db.models.fields import DateField
from django.db.models.functions import Cast, ExtractDay, ExtractWeek
# Django imports
from django.http import HttpResponse
from django.utils import timezone
@@ -49,6 +49,8 @@ from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_control
from django.views.decorators.vary import vary_on_cookie
from plane.utils.constants import RESTRICTED_WORKSPACE_SLUGS
from plane.payment.bgtasks.member_sync_task import member_sync_task
from django.conf import settings
class WorkSpaceViewSet(BaseViewSet):
@@ -132,6 +134,10 @@ class WorkSpaceViewSet(BaseViewSet):
role=20,
company_role=request.data.get("company_role", ""),
)
# Sync workspace members
member_sync_task.delay(slug)
return Response(
serializer.data, status=status.HTTP_201_CREATED
)
@@ -179,6 +185,33 @@ class WorkSpaceViewSet(BaseViewSet):
)
@allow_permission([ROLE.ADMIN], level="WORKSPACE")
def destroy(self, request, *args, **kwargs):
# Get the workspace
workspace = self.get_object()
# Fetch the workspace subscription
if settings.PAYMENT_SERVER_BASE_URL:
# Make a cancel request to the payment server
response = requests.post(
f"{settings.PAYMENT_SERVER_BASE_URL}/api/subscriptions/check/",
headers={
"content-type": "application/json",
"x-api-key": settings.PAYMENT_SERVER_AUTH_TOKEN,
},
json={"workspace_id": str(workspace.id)},
)
# Check if the response is successful
response.raise_for_status()
# Return the response
response = response.json()
# Check if the response contains the product key
if response.get("subscription_exists"):
return Response(
{"error": "workspace has active subscription"},
status=status.HTTP_400_BAD_REQUEST,
)
else:
# Delete the workspace
return super().destroy(request, *args, **kwargs)
return super().destroy(request, *args, **kwargs)
@@ -400,7 +433,6 @@ class ExportWorkspaceUserActivityEndpoint(BaseAPIView):
return csv_buffer
def post(self, request, slug, user_id):
if not request.data.get("date"):
return Response(
{"error": "Date is required"},

View File

@@ -44,6 +44,7 @@ from plane.db.models import (
from .. import BaseViewSet
from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.issue_filters import issue_filters
from plane.ee.models import IssuePropertyValue, DraftIssuePropertyValue
class WorkspaceDraftIssueViewSet(BaseViewSet):
@@ -346,6 +347,33 @@ class WorkspaceDraftIssueViewSet(BaseViewSet):
draft_issue_id=None,
)
draft_issue_property_values = (
DraftIssuePropertyValue.objects.filter(draft_issue=draft_issue)
)
IssuePropertyValue.objects.bulk_create(
[
IssuePropertyValue(
workspace_id=draft_issue_property_value.workspace_id,
project_id=draft_issue_property_value.project_id,
issue_id=serializer.data.get("id", None),
property_id=draft_issue_property_value.property_id,
value_text=draft_issue_property_value.value_text,
value_boolean=draft_issue_property_value.value_boolean,
value_decimal=draft_issue_property_value.value_decimal,
value_datetime=draft_issue_property_value.value_datetime,
value_uuid=draft_issue_property_value.value_uuid,
value_option=draft_issue_property_value.value_option,
)
for draft_issue_property_value in draft_issue_property_values
],
batch_size=10,
ignore_conflicts=True,
)
# TODO: Log the activity for issue property
draft_issue_property_values.delete()
# delete the draft issue
draft_issue.delete()

View File

@@ -13,7 +13,6 @@ from plane.app.permissions import allow_permission, ROLE
class WorkspaceFavoriteEndpoint(BaseAPIView):
@allow_permission(
allowed_roles=[ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE"
)
@@ -76,7 +75,6 @@ class WorkspaceFavoriteEndpoint(BaseAPIView):
class WorkspaceFavoriteGroupEndpoint(BaseAPIView):
@allow_permission(
allowed_roles=[ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE"
)

View File

@@ -1,6 +1,6 @@
# Python imports
from datetime import datetime
import uuid
import jwt
# Django imports
@@ -22,7 +22,7 @@ from plane.app.serializers import (
WorkSpaceMemberSerializer,
)
from plane.app.views.base import BaseAPIView
from plane.bgtasks.event_tracking_task import workspace_invite_event
from plane.bgtasks.event_tracking_task import track_event
from plane.bgtasks.workspace_invitation_task import workspace_invitation
from plane.db.models import (
User,
@@ -31,8 +31,9 @@ from plane.db.models import (
WorkspaceMemberInvite,
)
from plane.utils.cache import invalidate_cache, invalidate_cache_directly
from plane.payment.bgtasks.member_sync_task import member_sync_task
from .. import BaseViewSet
from plane.payment.utils.member_payment_count import workspace_member_check
class WorkspaceInvitationsViewset(BaseViewSet):
@@ -103,6 +104,22 @@ class WorkspaceInvitationsViewset(BaseViewSet):
status=status.HTTP_400_BAD_REQUEST,
)
# Get current existing workspace invitations where accepted is False
allowed, _, _ = workspace_member_check(
slug=slug,
requested_invite_list=emails,
requested_role=False,
current_role=False,
)
if not allowed:
return Response(
{
"error": "Reached seat limit - Upgrade to add more members",
},
status=status.HTTP_400_BAD_REQUEST,
)
workspace_invitations = []
for email in emails:
try:
@@ -154,6 +171,28 @@ class WorkspaceInvitationsViewset(BaseViewSet):
status=status.HTTP_200_OK,
)
def partial_update(self, request, slug, pk):
workspace_member_invite = WorkspaceMemberInvite.objects.get(
pk=pk, workspace__slug=slug
)
# Check if the role is being updated
if "role" in request.data:
allowed, _, _ = workspace_member_check(
slug=slug,
requested_role=request.data["role"],
current_role=workspace_member_invite.role,
requested_invite_list=[],
)
if not allowed:
return Response(
{
"error": "You cannot change the role the user as it will exceed the purchased limit"
},
status=status.HTTP_400_BAD_REQUEST,
)
return super().partial_update(request, slug, pk)
def destroy(self, request, slug, pk):
workspace_member_invite = WorkspaceMemberInvite.objects.get(
pk=pk, workspace__slug=slug
@@ -227,15 +266,25 @@ class WorkspaceJoinEndpoint(BaseAPIView):
workspace_invite.delete()
# Send event
workspace_invite_event.delay(
user=user.id if user is not None else None,
track_event.delay(
email=email,
user_agent=request.META.get("HTTP_USER_AGENT"),
ip=request.META.get("REMOTE_ADDR"),
event_name="MEMBER_ACCEPTED",
accepted_from="EMAIL",
properties={
"event_id": uuid.uuid4().hex,
"user": {"email": email, "id": str(user)},
"device_ctx": {
"ip": request.META.get("REMOTE_ADDR", None),
"user_agent": request.META.get(
"HTTP_USER_AGENT", None
),
},
"accepted_from": "EMAIL",
},
)
# sync workspace members
member_sync_task.delay(slug)
return Response(
{"message": "Workspace Invitation Accepted"},
status=status.HTTP_200_OK,
@@ -308,6 +357,12 @@ class UserWorkspaceInvitationsViewSet(BaseViewSet):
ignore_conflicts=True,
)
# Sync workspace members
[
member_sync_task.delay(invitation.workspace.slug)
for invitation in workspace_invitations
]
# Delete joined workspace invites
workspace_invitations.delete()

View File

@@ -7,6 +7,7 @@ from django.db.models import (
Subquery,
IntegerField,
)
from django.utils import timezone
from django.db.models.functions import Coalesce
from django.db.models.functions import Cast
@@ -39,10 +40,12 @@ from plane.db.models import (
Workspace,
WorkspaceMember,
DraftIssue,
Cycle,
)
from plane.utils.cache import cache_response, invalidate_cache
from plane.payment.bgtasks.member_sync_task import member_sync_task
from .. import BaseViewSet
from plane.payment.utils.member_payment_count import workspace_member_check
class WorkSpaceMemberViewSet(BaseViewSet):
@@ -118,12 +121,29 @@ class WorkSpaceMemberViewSet(BaseViewSet):
workspace__slug=slug, member_id=workspace_member.member_id
).update(role=int(request.data.get("role")))
if "role" in request.data:
allowed, _, _ = workspace_member_check(
slug=slug,
requested_role=request.data.get("role"),
current_role=workspace_member.role,
requested_invite_list=[],
)
if not allowed:
return Response(
{
"error": "Cannot update the role as it exceeds the purchased seat limit"
},
status=status.HTTP_400_BAD_REQUEST,
)
serializer = WorkSpaceMemberSerializer(
workspace_member, data=request.data, partial=True
)
if serializer.is_valid():
serializer.save()
# Sync workspace members
member_sync_task.delay(slug)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -200,6 +220,10 @@ class WorkSpaceMemberViewSet(BaseViewSet):
workspace_member.is_active = False
workspace_member.save()
# Sync workspace members
member_sync_task.delay(slug)
return Response(status=status.HTTP_204_NO_CONTENT)
@invalidate_cache(
@@ -270,6 +294,9 @@ class WorkSpaceMemberViewSet(BaseViewSet):
# # Deactivate the user
workspace_member.is_active = False
workspace_member.save()
# # Sync workspace members
member_sync_task.delay(slug)
return Response(status=status.HTTP_204_NO_CONTENT)
@@ -290,11 +317,26 @@ class WorkspaceMemberUserEndpoint(BaseAPIView):
def get(self, request, slug):
draft_issue_count = (
DraftIssue.objects.filter(
created_by=request.user,
created_by=OuterRef("member"),
workspace_id=OuterRef("workspace_id"),
project__project_projectmember__member=OuterRef("member"),
project__project_projectmember__is_active=True,
)
.values("workspace_id")
.annotate(count=Count("id"))
.annotate(count=Count("id", distinct=True))
.values("count")
)
active_cycles_count = (
Cycle.objects.filter(
workspace__slug=OuterRef("workspace__slug"),
project__project_projectmember__role__gt=5,
project__project_projectmember__member=OuterRef("member"),
project__project_projectmember__is_active=True,
start_date__lte=timezone.now(),
end_date__gte=timezone.now(),
)
.values("workspace__slug")
.annotate(count=Count("id", distinct=True))
.values("count")
)
@@ -307,6 +349,12 @@ class WorkspaceMemberUserEndpoint(BaseAPIView):
Subquery(draft_issue_count, output_field=IntegerField()), 0
)
)
.annotate(
active_cycles_count=Coalesce(
Subquery(active_cycles_count, output_field=IntegerField()),
0,
)
)
.first()
)
serializer = WorkspaceMemberMeSerializer(workspace_member)

View File

@@ -25,10 +25,11 @@ from plane.authentication.utils.host import base_host
class Adapter:
"""Common interface for all auth providers"""
def __init__(self, request, provider, callback=None):
def __init__(self, request, provider, callback=None, is_mobile=False):
self.request = request
self.provider = provider
self.callback = callback
self.is_mobile = is_mobile
self.token_data = None
self.user_data = None
@@ -141,8 +142,19 @@ class Adapter:
# Check if the user is present
user = User.objects.filter(email=email).first()
# Check if sign up case or login
is_signup = bool(user)
is_signup = not bool(user)
# check if the user is authenticated via mobile
if is_signup and self.is_mobile:
raise AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES[
"MOBILE_SIGNUP_DISABLED"
],
error_message="MOBILE_SIGNUP_DISABLED",
)
# If user is not present, create a new user
if not user:
# New user

View File

@@ -40,6 +40,12 @@ AUTHENTICATION_ERROR_CODES = {
"GOOGLE_OAUTH_PROVIDER_ERROR": 5115,
"GITHUB_OAUTH_PROVIDER_ERROR": 5120,
"GITLAB_OAUTH_PROVIDER_ERROR": 5121,
# OIDC
"OIDC_NOT_CONFIGURED": 5190,
"OIDC_PROVIDER_ERROR": 5195,
# SAML
"SAML_NOT_CONFIGURED": 5190,
"SAML_PROVIDER_ERROR": 5195,
# Reset Password
"INVALID_PASSWORD_TOKEN": 5125,
"EXPIRED_PASSWORD_TOKEN": 5130,
@@ -47,7 +53,7 @@ AUTHENTICATION_ERROR_CODES = {
"INCORRECT_OLD_PASSWORD": 5135,
"MISSING_PASSWORD": 5138,
"INVALID_NEW_PASSWORD": 5140,
# set passowrd
# set password
"PASSWORD_ALREADY_SET": 5145,
# Admin
"ADMIN_ALREADY_EXIST": 5150,
@@ -63,11 +69,14 @@ AUTHENTICATION_ERROR_CODES = {
"RATE_LIMIT_EXCEEDED": 5900,
# Unknown
"AUTHENTICATION_FAILED": 5999,
# user not onboarded
"USER_NOT_ONBOARDED": 6000,
"TOKEN_NOT_SET": 6005,
"MOBILE_SIGNUP_DISABLED": 6010,
}
class AuthenticationException(Exception):
error_code = None
error_message = None
payload = {}

View File

@@ -1,5 +1,6 @@
# Python imports
import requests
import os
# Django imports
from django.utils import timezone
@@ -28,8 +29,14 @@ class OauthAdapter(Adapter):
client_secret=None,
code=None,
callback=None,
is_mobile=False,
):
super().__init__(request=request, provider=provider, callback=callback)
super().__init__(
request=request,
provider=provider,
callback=callback,
is_mobile=is_mobile,
)
self.client_id = client_id
self.scope = scope
self.redirect_uri = redirect_uri
@@ -46,6 +53,8 @@ class OauthAdapter(Adapter):
return "GITHUB_OAUTH_PROVIDER_ERROR"
elif self.provider == "gitlab":
return "GITLAB_OAUTH_PROVIDER_ERROR"
elif self.provider == "oidc":
return "OIDC_PROVIDER_ERROR"
else:
return "OAUTH_NOT_CONFIGURED"
@@ -67,7 +76,10 @@ class OauthAdapter(Adapter):
try:
headers = headers or {}
response = requests.post(
self.get_token_url(), data=data, headers=headers
self.get_token_url(),
data=data,
headers=headers,
verify=os.environ.get("SSL_VERIFY", "1") == "1",
)
response.raise_for_status()
return response.json()
@@ -83,7 +95,11 @@ class OauthAdapter(Adapter):
headers = {
"Authorization": f"Bearer {self.token_data.get('access_token')}"
}
response = requests.get(self.get_user_info_url(), headers=headers)
response = requests.get(
self.get_user_info_url(),
headers=headers,
verify=os.environ.get("SSL_VERIFY", "1") == "1",
)
response.raise_for_status()
return response.json()
except requests.RequestException:

View File

@@ -0,0 +1,215 @@
# Python imports
import os
# Django imports
from django.conf import settings
# Third party imports
from onelogin.saml2.auth import OneLogin_Saml2_Auth
# Module imports
from plane.license.utils.instance_value import get_configuration_value
from .base import Adapter
from plane.authentication.adapter.error import (
AuthenticationException,
AUTHENTICATION_ERROR_CODES,
)
from plane.utils.exception_logger import log_exception
class SAMLAdapter(Adapter):
    """Authentication adapter implementing SAML 2.0 single sign-on.

    Reads the IdP settings (entity id, SSO/logout URLs, certificate)
    from the instance configuration, builds the OneLogin settings dict
    and wraps ``OneLogin_Saml2_Auth`` for login, assertion processing
    and logout.
    """

    provider = "saml"
    auth = None
    saml_config = {}

    def __init__(
        self,
        request,
    ):
        (
            SAML_ENTITY_ID,
            SAML_SSO_URL,
            SAML_LOGOUT_URL,
            SAML_CERTIFICATE,
        ) = get_configuration_value(
            [
                {
                    "key": "SAML_ENTITY_ID",
                    "default": os.environ.get("SAML_ENTITY_ID"),
                },
                {
                    "key": "SAML_SSO_URL",
                    "default": os.environ.get("SAML_SSO_URL"),
                },
                {
                    "key": "SAML_LOGOUT_URL",
                    "default": os.environ.get("SAML_LOGOUT_URL"),
                },
                {
                    "key": "SAML_CERTIFICATE",
                    "default": os.environ.get("SAML_CERTIFICATE"),
                },
            ]
        )

        # The logout URL is optional; entity id, SSO URL and certificate
        # are mandatory for the flow to work at all.
        if not (SAML_ENTITY_ID and SAML_SSO_URL and SAML_CERTIFICATE):
            raise AuthenticationException(
                error_code=AUTHENTICATION_ERROR_CODES["SAML_NOT_CONFIGURED"],
                error_message="SAML_NOT_CONFIGURED",
            )

        super().__init__(request, self.provider)

        req = self.prepare_saml_request(self.request)
        saml_config = self.generate_saml_configuration(
            request=request,
            entity_id=SAML_ENTITY_ID,
            sso_url=SAML_SSO_URL,
            logout_url=SAML_LOGOUT_URL,
            idp_certificate=SAML_CERTIFICATE,
        )
        # Generate configuration
        self.saml_config = saml_config
        auth = OneLogin_Saml2_Auth(
            req,
            saml_config,
        )
        self.auth = auth

    def generate_saml_configuration(
        self,
        request,
        entity_id,
        sso_url,
        logout_url,
        idp_certificate,
    ):
        """Build the OneLogin settings dict for this SP/IdP pair.

        The SP endpoints are derived from the incoming request's scheme
        and host; the IdP side comes from the configured values.
        """
        return {
            "strict": True,
            "debug": settings.DEBUG,
            "sp": {
                "entityId": f"{request.scheme}://{request.get_host()}/auth/saml/metadata/",
                "assertionConsumerService": {
                    "url": f"{request.scheme}://{request.get_host()}/auth/saml/callback/",
                    "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST",
                },
            },
            "idp": {
                "entityId": entity_id,
                "singleSignOnService": {
                    "url": sso_url,
                    "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect",
                },
                "singleLogoutService": {
                    "url": logout_url,
                    "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect",
                },
                "x509cert": idp_certificate,
            },
            "attributeConsumingService": {
                "serviceName": "Plane SAML",
                "serviceDescription": "Plane SAML",
                "requestedAttributes": [
                    {
                        "name": "first_name",
                        "friendlyName": "user.firstName",
                        "isRequired": False,
                        "nameFormat": "urn:oasis:names:tc:SAML:2.0:attrname-format:basic",
                    },
                    {
                        "name": "last_name",
                        "friendlyName": "user.lastName",
                        "isRequired": False,
                        "nameFormat": "urn:oasis:names:tc:SAML:2.0:attrname-format:basic",
                    },
                    {
                        "name": "email",
                        "friendlyName": "user.email",
                        "isRequired": True,
                        "nameFormat": "urn:oasis:names:tc:SAML:2.0:attrname-format:basic",
                    },
                ],
            },
        }

    def prepare_saml_request(self, request):
        """Convert a Django request into the dict shape the OneLogin
        toolkit expects."""
        return {
            "https": "on" if request.is_secure() else "off",
            "http_host": request.get_host(),
            "script_name": request.path,
            "get_data": request.GET.copy(),
            "post_data": request.POST.copy(),
        }

    def get_auth_url(self):
        """Return the IdP login (redirect) URL for this request."""
        return self.auth.login()

    def authenticate(self):
        """Process the IdP assertion, map SAML attributes to user data
        and complete login/signup.

        Raises ``AuthenticationException`` with SAML_PROVIDER_ERROR on
        any assertion error or when the mandatory email attribute is
        missing.
        """
        self.auth.process_response()
        errors = self.auth.get_errors()
        if errors:
            # Log the errors. NOTE: the previous version duplicated the
            # log+raise and, in one branch, passed error_code and
            # error_message swapped; both raises collapsed to one with
            # the keyword arguments in the correct order.
            log_exception(Exception(errors))
            raise AuthenticationException(
                error_code=AUTHENTICATION_ERROR_CODES[
                    "SAML_PROVIDER_ERROR"
                ],
                error_message="SAML_PROVIDER_ERROR",
            )

        attributes = self.auth.get_attributes()
        # SAML attribute values arrive as lists; take the first entry.
        email = (
            attributes.get("email")[0]
            if attributes.get("email") and len(attributes.get("email"))
            else None
        )
        if not email:
            # Email is mandatory — without it the user cannot be
            # identified. (error_code/error_message were swapped here
            # in the original.)
            raise AuthenticationException(
                error_code=AUTHENTICATION_ERROR_CODES[
                    "SAML_PROVIDER_ERROR"
                ],
                error_message="SAML_PROVIDER_ERROR",
            )

        first_name = (
            attributes.get("first_name")[0]
            if attributes.get("first_name")
            and len(attributes.get("first_name"))
            else ""
        )
        last_name = (
            attributes.get("last_name")[0]
            if attributes.get("last_name") and len(attributes.get("last_name"))
            else ""
        )

        super().set_user_data(
            {
                "email": email,
                "user": {
                    "first_name": first_name,
                    "last_name": last_name,
                    "email": email,
                    "is_password_autoset": True,
                },
            }
        )

        return self.complete_login_or_signup()

    def logout(self):
        """Initiate IdP single logout; return False on any failure
        (best-effort — logout must not raise)."""
        try:
            return self.auth.logout()
        except Exception:
            return False

View File

@@ -6,6 +6,9 @@ from urllib.parse import urlencode
import pytz
import requests
# Django imports
from django.conf import settings
# Module imports
from plane.authentication.adapter.oauth import OauthAdapter
from plane.license.utils.instance_value import get_configuration_value
@@ -16,14 +19,20 @@ from plane.authentication.adapter.error import (
class GitHubOAuthProvider(OauthAdapter):
token_url = "https://github.com/login/oauth/access_token"
userinfo_url = "https://api.github.com/user"
provider = "github"
scope = "read:user user:email"
def __init__(self, request, code=None, state=None, callback=None):
def __init__(
self,
request,
code=None,
state=None,
callback=None,
redirect_uri=None,
is_mobile=False,
):
GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET = get_configuration_value(
[
{
@@ -46,7 +55,20 @@ class GitHubOAuthProvider(OauthAdapter):
client_id = GITHUB_CLIENT_ID
client_secret = GITHUB_CLIENT_SECRET
redirect_uri = f"""{"https" if request.is_secure() else "http"}://{request.get_host()}/auth/github/callback/"""
scheme = (
"https"
if settings.IS_HEROKU
else "https"
if request.is_secure()
else "http"
)
redirect_uri = (
redirect_uri
if redirect_uri
else (f"""{scheme}://{request.get_host()}/auth/github/callback/""")
)
url_params = {
"client_id": client_id,
"redirect_uri": redirect_uri,
@@ -68,6 +90,7 @@ class GitHubOAuthProvider(OauthAdapter):
client_secret,
code,
callback=callback,
is_mobile=is_mobile,
)
def set_token_data(self):

View File

@@ -15,12 +15,12 @@ from plane.authentication.adapter.error import (
class GitLabOAuthProvider(OauthAdapter):
provider = "gitlab"
scope = "read_user"
def __init__(self, request, code=None, state=None, callback=None):
def __init__(
self, request, code=None, state=None, callback=None, is_mobile=False
):
GITLAB_CLIENT_ID, GITLAB_CLIENT_SECRET, GITLAB_HOST = (
get_configuration_value(
[
@@ -76,6 +76,7 @@ class GitLabOAuthProvider(OauthAdapter):
client_secret,
code,
callback=callback,
is_mobile=is_mobile,
)
def set_token_data(self):

View File

@@ -5,6 +5,9 @@ from urllib.parse import urlencode
import pytz
# Django imports
from django.conf import settings
# Module imports
from plane.authentication.adapter.oauth import OauthAdapter
from plane.license.utils.instance_value import get_configuration_value
@@ -20,7 +23,15 @@ class GoogleOAuthProvider(OauthAdapter):
scope = "https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile"
provider = "google"
def __init__(self, request, code=None, state=None, callback=None):
def __init__(
self,
request,
code=None,
state=None,
callback=None,
redirect_uri=None,
is_mobile=False,
):
(GOOGLE_CLIENT_ID, GOOGLE_CLIENT_SECRET) = get_configuration_value(
[
{
@@ -43,7 +54,20 @@ class GoogleOAuthProvider(OauthAdapter):
client_id = GOOGLE_CLIENT_ID
client_secret = GOOGLE_CLIENT_SECRET
redirect_uri = f"""{"https" if request.is_secure() else "http"}://{request.get_host()}/auth/google/callback/"""
scheme = (
"https"
if settings.IS_HEROKU
else "https"
if request.is_secure()
else "http"
)
redirect_uri = (
redirect_uri
if redirect_uri
else (f"""{scheme}://{request.get_host()}/auth/google/callback/""")
)
url_params = {
"client_id": client_id,
"scope": self.scope,
@@ -67,6 +91,7 @@ class GoogleOAuthProvider(OauthAdapter):
client_secret,
code,
callback=callback,
is_mobile=is_mobile,
)
def set_token_data(self):

View File

@@ -0,0 +1,158 @@
# Python imports
import os
from datetime import datetime
from urllib.parse import urlencode
import pytz
# Module imports
from plane.authentication.adapter.oauth import OauthAdapter
from plane.license.utils.instance_value import get_configuration_value
from plane.authentication.adapter.error import (
AuthenticationException,
AUTHENTICATION_ERROR_CODES,
)
from plane.db.models import Account
class OIDCOAuthProvider(OauthAdapter):
    """OAuth adapter for a generic OpenID Connect provider.

    All endpoints (authorize, token, userinfo, logout) and client
    credentials come from instance configuration / environment, so any
    spec-compliant OIDC IdP can be used.
    """

    provider = "oidc"
    scope = "openid email profile"

    def __init__(self, request, code=None, state=None, is_mobile=False):
        (
            OIDC_CLIENT_ID,
            OIDC_CLIENT_SECRET,
            OIDC_TOKEN_URL,
            OIDC_USERINFO_URL,
            OIDC_AUTHORIZE_URL,
        ) = get_configuration_value(
            [
                {
                    "key": "OIDC_CLIENT_ID",
                    "default": os.environ.get("OIDC_CLIENT_ID"),
                },
                {
                    "key": "OIDC_CLIENT_SECRET",
                    "default": os.environ.get("OIDC_CLIENT_SECRET"),
                },
                {
                    "key": "OIDC_TOKEN_URL",
                    "default": os.environ.get("OIDC_TOKEN_URL"),
                },
                {
                    "key": "OIDC_USERINFO_URL",
                    "default": os.environ.get("OIDC_USERINFO_URL"),
                },
                {
                    "key": "OIDC_AUTHORIZE_URL",
                    "default": os.environ.get("OIDC_AUTHORIZE_URL"),
                },
            ]
        )

        # Every endpoint and credential is required for the flow.
        if not (
            OIDC_CLIENT_ID
            and OIDC_CLIENT_SECRET
            and OIDC_TOKEN_URL
            and OIDC_USERINFO_URL
            and OIDC_AUTHORIZE_URL
        ):
            raise AuthenticationException(
                error_code=AUTHENTICATION_ERROR_CODES["OIDC_NOT_CONFIGURED"],
                error_message="OIDC_NOT_CONFIGURED",
            )

        redirect_uri = (
            f"{request.scheme}://{request.get_host()}/auth/oidc/callback/"
        )
        url_params = {
            "client_id": OIDC_CLIENT_ID,
            "response_type": "code",
            "redirect_uri": redirect_uri,
            "state": state,
            "scope": self.scope,
        }
        auth_url = f"{OIDC_AUTHORIZE_URL}?{urlencode(url_params)}"
        super().__init__(
            request,
            self.provider,
            OIDC_CLIENT_ID,
            self.scope,
            redirect_uri,
            auth_url,
            OIDC_TOKEN_URL,
            OIDC_USERINFO_URL,
            OIDC_CLIENT_SECRET,
            code,
            is_mobile=is_mobile,
        )

    def set_token_data(self):
        """Exchange the authorization code for tokens and store them."""
        # Local import keeps the expiry fix self-contained.
        from datetime import timedelta

        data = {
            "code": self.code,
            "client_id": self.client_id,
            "client_secret": self.client_secret,
            "redirect_uri": self.redirect_uri,
            "grant_type": "authorization_code",
        }
        token_response = self.get_user_token(
            data=data,
            headers={"Content-Type": "application/x-www-form-urlencoded"},
        )
        # RFC 6749 §5.1: ``expires_in`` is the token lifetime in
        # seconds, not an epoch timestamp. The previous code passed it
        # to ``datetime.fromtimestamp`` which produced 1970-era expiry
        # datetimes; the correct expiry is now + expires_in.
        access_token_expired_at = (
            datetime.now(tz=pytz.utc)
            + timedelta(seconds=int(token_response.get("expires_in")))
            if token_response.get("expires_in")
            else None
        )
        super().set_token_data(
            {
                "access_token": token_response.get("access_token"),
                "refres_token": token_response.get("refresh_token", None),
                "access_token_expired_at": access_token_expired_at,
                "refresh_token_expired_at": (
                    # This key is an absolute timestamp (``_expired_at``)
                    # rather than a lifetime, so fromtimestamp applies.
                    datetime.fromtimestamp(
                        token_response.get("refresh_token_expired_at"),
                        tz=pytz.utc,
                    )
                    if token_response.get("refresh_token_expired_at")
                    else None
                ),
                "id_token": token_response.get("id_token", ""),
            }
        )

    def set_user_data(self):
        """Fetch the userinfo endpoint and map standard OIDC claims
        (email, picture, given_name, family_name, sub) to user data."""
        user_info_response = self.get_user_response()
        user_data = {
            "email": user_info_response.get("email"),
            "user": {
                "avatar": user_info_response.get("picture"),
                "first_name": user_info_response.get("given_name"),
                "last_name": user_info_response.get("family_name"),
                "provider_id": user_info_response.get("sub"),
                "is_password_autoset": True,
            },
        }
        super().set_user_data(user_data)

    def logout(self, logout_url=None):
        """Return the RP-initiated logout URL for the provider, or
        False when logout cannot be performed (no configured logout
        endpoint, no stored id_token, or no post-logout redirect)."""
        (OIDC_LOGOUT_URL,) = get_configuration_value(
            [
                {
                    "key": "OIDC_LOGOUT_URL",
                    "default": os.environ.get("OIDC_LOGOUT_URL"),
                },
            ]
        )
        account = Account.objects.filter(
            user=self.request.user, provider=self.provider
        ).first()
        id_token = account.id_token if account and account.id_token else None
        if OIDC_LOGOUT_URL and id_token and logout_url:
            return f"{OIDC_LOGOUT_URL}?id_token_hint={id_token}&post_logout_redirect_uri={logout_url}"
        else:
            return False

Some files were not shown because too many files have changed in this diff Show More