Mirror of https://github.com/makeplane/plane, synced 2025-08-07 19:59:33 +00:00
Merge branch 'develop' into sync/ce-ee
@@ -31,3 +31,5 @@ USE_MINIO=1
 
 # Nginx Configuration
 NGINX_PORT=80
+
+MONGO_DB_URL="mongodb://plane-mongodb:27017/"
466  .github/workflows/build-branch-ee.yml  (vendored, new file)
@@ -0,0 +1,466 @@
name: Branch Build Enterprise

on:
  workflow_dispatch:
    inputs:
      arm64:
        description: "Build for ARM64 architecture"
        required: false
        default: false
        type: boolean
  push:
    branches:
      - master
      - preview
  release:
    types: [released, prereleased]

env:
  TARGET_BRANCH: ${{ github.ref_name || github.event.release.target_commitish }}
  ARM64_BUILD: ${{ github.event.inputs.arm64 }}

jobs:
  branch_build_setup:
    name: Build Setup
    runs-on: ${{ vars.ACTION_RUNS_ON }}
    outputs:
      gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
      gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
      gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
      gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
      gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
      build_web: ${{ steps.changed_files.outputs.web_any_changed }}
      build_admin: ${{ steps.changed_files.outputs.admin_any_changed }}
      build_space: ${{ steps.changed_files.outputs.space_any_changed }}
      build_apiserver: ${{ steps.changed_files.outputs.apiserver_any_changed }}
      build_proxy: ${{ steps.changed_files.outputs.proxy_any_changed }}
      artifact_upload_to_s3: ${{ steps.set_env_variables.outputs.artifact_upload_to_s3 }}
      artifact_s3_suffix: ${{ steps.set_env_variables.outputs.artifact_s3_suffix }}

    steps:
      - id: set_env_variables
        name: Set Environment Variables
        run: |
          if [ "${{ env.TARGET_BRANCH }}" == "master" ] || [ "${{ github.event_name }}" == "release" ] || [ "${{ env.ARM64_BUILD }}" == "true" ]; then
            echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
            echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
            echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
            echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT
          else
            echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
            echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
            echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
            echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
          fi
          BR_NAME=$(echo "${{ env.TARGET_BRANCH }}" | tr / -)
          echo "TARGET_BRANCH=$BR_NAME" >> $GITHUB_OUTPUT

          if [ "${{ github.event_name }}" == "release" ]; then
            echo "artifact_upload_to_s3=true" >> $GITHUB_OUTPUT
            echo "artifact_s3_suffix=${{ github.event.release.tag_name }}" >> $GITHUB_OUTPUT
          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            echo "artifact_upload_to_s3=true" >> $GITHUB_OUTPUT
            echo "artifact_s3_suffix=latest" >> $GITHUB_OUTPUT
          elif [ "${{ env.TARGET_BRANCH }}" == "preview" ] || [ "${{ env.TARGET_BRANCH }}" == "develop" ]; then
            echo "artifact_upload_to_s3=true" >> $GITHUB_OUTPUT
            echo "artifact_s3_suffix=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT
          else
            echo "artifact_upload_to_s3=false" >> $GITHUB_OUTPUT
            echo "artifact_s3_suffix=$BR_NAME" >> $GITHUB_OUTPUT
          fi

      - id: checkout_files
        name: Checkout Files
        uses: actions/checkout@v4

      - name: Get changed files
        id: changed_files
        uses: tj-actions/changed-files@v42
        with:
          files_yaml: |
            apiserver:
              - apiserver/**
            proxy:
              - nginx/**
            admin:
              - admin/**
              - packages/**
              - "package.json"
              - "yarn.lock"
              - "tsconfig.json"
              - "turbo.json"
            space:
              - space/**
              - packages/**
              - "package.json"
              - "yarn.lock"
              - "tsconfig.json"
              - "turbo.json"
            web:
              - web/**
              - packages/**
              - "package.json"
              - "yarn.lock"
              - "tsconfig.json"
              - "turbo.json"

  branch_build_push_admin:
    if: ${{ needs.branch_build_setup.outputs.build_admin == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
    name: Build-Push Admin Docker Image
    runs-on: ${{ vars.ACTION_RUNS_ON }}
    needs: [branch_build_setup]
    env:
      ADMIN_TAG: makeplane/admin-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
    steps:
      - name: Set Admin Docker Tag
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            TAG=makeplane/admin-enterprise:stable
            TAG=${TAG},makeplane/admin-enterprise:${{ github.event.release.tag_name }}
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/admin-enterprise:stable
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/admin-enterprise:${{ github.event.release.tag_name }}
          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            TAG=makeplane/admin-enterprise:latest
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/admin-enterprise:latest
          else
            TAG=${{ env.ADMIN_TAG }}
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/admin-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
          fi
          echo "ADMIN_TAG=${TAG}" >> $GITHUB_ENV

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to Harbor
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.HARBOR_USERNAME }}
          password: ${{ secrets.HARBOR_TOKEN }}
          registry: ${{ vars.HARBOR_REGISTRY }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: ${{ env.BUILDX_DRIVER }}
          version: ${{ env.BUILDX_VERSION }}
          endpoint: ${{ env.BUILDX_ENDPOINT }}

      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Build and Push Frontend to Docker Container Registry
        uses: docker/build-push-action@v5.1.0
        with:
          context: .
          file: ./admin/Dockerfile.admin
          platforms: ${{ env.BUILDX_PLATFORMS }}
          tags: ${{ env.ADMIN_TAG }}
          push: true
        env:
          DOCKER_BUILDKIT: 1
          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

  branch_build_push_web:
    if: ${{ needs.branch_build_setup.outputs.build_web == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
    name: Build-Push Web Docker Image
    runs-on: ${{ vars.ACTION_RUNS_ON }}
    needs: [branch_build_setup]
    env:
      WEB_TAG: makeplane/web-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
    steps:
      - name: Set Web Docker Tag
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            TAG=makeplane/web-enterprise:stable
            TAG=${TAG},makeplane/web-enterprise:${{ github.event.release.tag_name }}
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/web-enterprise:stable
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/web-enterprise:${{ github.event.release.tag_name }}
          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            TAG=makeplane/web-enterprise:latest
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/web-enterprise:latest
          else
            TAG=${{ env.WEB_TAG }}
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/web-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
          fi
          echo "WEB_TAG=${TAG}" >> $GITHUB_ENV

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to Harbor
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.HARBOR_USERNAME }}
          password: ${{ secrets.HARBOR_TOKEN }}
          registry: ${{ vars.HARBOR_REGISTRY }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: ${{ env.BUILDX_DRIVER }}
          version: ${{ env.BUILDX_VERSION }}
          endpoint: ${{ env.BUILDX_ENDPOINT }}

      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Build and Push Web to Docker Container Registry
        uses: docker/build-push-action@v5.1.0
        with:
          context: .
          file: ./web/Dockerfile.web
          platforms: ${{ env.BUILDX_PLATFORMS }}
          tags: ${{ env.WEB_TAG }}
          push: true
        env:
          DOCKER_BUILDKIT: 1
          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

  branch_build_push_space:
    if: ${{ needs.branch_build_setup.outputs.build_space == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
    name: Build-Push Space Docker Image
    runs-on: ${{ vars.ACTION_RUNS_ON }}
    needs: [branch_build_setup]
    env:
      SPACE_TAG: makeplane/space-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
    steps:
      - name: Set Space Docker Tag
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            TAG=makeplane/space-enterprise:stable
            TAG=${TAG},makeplane/space-enterprise:${{ github.event.release.tag_name }}
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/space-enterprise:stable
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/space-enterprise:${{ github.event.release.tag_name }}
          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            TAG=makeplane/space-enterprise:latest
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/space-enterprise:latest
          else
            TAG=${{ env.SPACE_TAG }}
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/space-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
          fi
          echo "SPACE_TAG=${TAG}" >> $GITHUB_ENV

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to Harbor
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.HARBOR_USERNAME }}
          password: ${{ secrets.HARBOR_TOKEN }}
          registry: ${{ vars.HARBOR_REGISTRY }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: ${{ env.BUILDX_DRIVER }}
          version: ${{ env.BUILDX_VERSION }}
          endpoint: ${{ env.BUILDX_ENDPOINT }}

      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Build and Push Space to Docker Hub
        uses: docker/build-push-action@v5.1.0
        with:
          context: .
          file: ./space/Dockerfile.space
          platforms: ${{ env.BUILDX_PLATFORMS }}
          tags: ${{ env.SPACE_TAG }}
          push: true
        env:
          DOCKER_BUILDKIT: 1
          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

  branch_build_push_apiserver:
    if: ${{ needs.branch_build_setup.outputs.build_apiserver == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
    name: Build-Push API Server Docker Image
    runs-on: ${{ vars.ACTION_RUNS_ON }}
    needs: [branch_build_setup]
    env:
      BACKEND_TAG: makeplane/backend-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
    steps:
      - name: Set Backend Docker Tag
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            TAG=makeplane/backend-enterprise:stable
            TAG=${TAG},makeplane/backend-enterprise:${{ github.event.release.tag_name }}
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/backend-enterprise:stable
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/backend-enterprise:${{ github.event.release.tag_name }}
          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            TAG=makeplane/backend-enterprise:latest
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/backend-enterprise:latest
          else
            TAG=${{ env.BACKEND_TAG }}
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/backend-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
          fi
          echo "BACKEND_TAG=${TAG}" >> $GITHUB_ENV

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to Harbor
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.HARBOR_USERNAME }}
          password: ${{ secrets.HARBOR_TOKEN }}
          registry: ${{ vars.HARBOR_REGISTRY }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: ${{ env.BUILDX_DRIVER }}
          version: ${{ env.BUILDX_VERSION }}
          endpoint: ${{ env.BUILDX_ENDPOINT }}

      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Build and Push Backend to Docker Hub
        uses: docker/build-push-action@v5.1.0
        with:
          context: ./apiserver
          file: ./apiserver/Dockerfile.api
          platforms: ${{ env.BUILDX_PLATFORMS }}
          push: true
          tags: ${{ env.BACKEND_TAG }}
        env:
          DOCKER_BUILDKIT: 1
          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

  branch_build_push_proxy:
    if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
    name: Build-Push Proxy Docker Image
    runs-on: ${{ vars.ACTION_RUNS_ON }}
    needs: [branch_build_setup]
    env:
      PROXY_TAG: makeplane/proxy-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
    steps:
      - name: Set Proxy Docker Tag
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            TAG=makeplane/proxy-enterprise:stable
            TAG=${TAG},makeplane/proxy-enterprise:${{ github.event.release.tag_name }}
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/proxy-enterprise:stable
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/proxy-enterprise:${{ github.event.release.tag_name }}
          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            TAG=makeplane/proxy-enterprise:latest
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/proxy-enterprise:latest
          else
            TAG=${{ env.PROXY_TAG }}
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/proxy-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
          fi
          echo "PROXY_TAG=${TAG}" >> $GITHUB_ENV

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to Harbor
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.HARBOR_USERNAME }}
          password: ${{ secrets.HARBOR_TOKEN }}
          registry: ${{ vars.HARBOR_REGISTRY }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: ${{ env.BUILDX_DRIVER }}
          version: ${{ env.BUILDX_VERSION }}
          endpoint: ${{ env.BUILDX_ENDPOINT }}

      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Build and Push Plane-Proxy to Docker Hub
        uses: docker/build-push-action@v5.1.0
        with:
          context: ./nginx
          file: ./nginx/Dockerfile
          platforms: ${{ env.BUILDX_PLATFORMS }}
          tags: ${{ env.PROXY_TAG }}
          push: true
        env:
          DOCKER_BUILDKIT: 1
          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

  upload_artifacts_s3:
    if: ${{ needs.branch_build_setup.outputs.artifact_upload_to_s3 == 'true' }}
    name: Upload artifacts to S3 Bucket
    runs-on: ${{ vars.ACTION_RUNS_ON }}
    needs: [branch_build_setup]
    container:
      image: docker:20.10.7
      credentials:
        username: ${{ secrets.DOCKERHUB_USERNAME }}
        password: ${{ secrets.DOCKERHUB_TOKEN }}
    env:
      ARTIFACT_SUFFIX: ${{ needs.branch_build_setup.outputs.artifact_s3_suffix }}
      AWS_ACCESS_KEY_ID: ${{ secrets.SELF_HOST_BUCKET_ACCESS_KEY }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.SELF_HOST_BUCKET_SECRET_KEY }}
      TARGET_BRANCH: ${{ github.ref_name || github.event.release.target_commitish }}
    steps:
      - id: checkout_files
        name: Checkout Files
        uses: actions/checkout@v4

      - name: Upload artifacts
        run: |
          apk update
          apk add --no-cache aws-cli

          mkdir -p ~/${{ env.ARTIFACT_SUFFIX }}

          cp deploy/cli-install/variables.env ~/${{ env.ARTIFACT_SUFFIX }}/variables.env
          cp deploy/cli-install/Caddyfile ~/${{ env.ARTIFACT_SUFFIX }}/Caddyfile
          sed -e 's@${APP_RELEASE_VERSION}@'${{ env.ARTIFACT_SUFFIX }}'@' deploy/cli-install/docker-compose.yml > ~/${{ env.ARTIFACT_SUFFIX }}/docker-compose.yml
          sed -e 's@${APP_RELEASE_VERSION}@'${{ env.ARTIFACT_SUFFIX }}'@' deploy/cli-install/docker-compose-caddy.yml > ~/${{ env.ARTIFACT_SUFFIX }}/docker-compose-caddy.yml

          aws s3 cp ~/${{ env.ARTIFACT_SUFFIX }} s3://${{ vars.SELF_HOST_BUCKET_NAME }}/plane-enterprise/${{ env.ARTIFACT_SUFFIX }} --recursive

          rm -rf ~/${{ env.ARTIFACT_SUFFIX }}
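Editor's note: each "Set ... Docker Tag" step above builds a comma-separated tag list that docker/build-push-action receives in its `tags:` input. A minimal sketch for checking that logic locally, with the GitHub expressions replaced by example values (the registry and project names below are hypothetical, not taken from the repository's actual configuration):

    #!/bin/bash
    # Mirrors the "Set Admin Docker Tag" step for the release case.
    RELEASE_TAG="v1.2.3"                    # assumption: github.event.release.tag_name
    HARBOR_REGISTRY="registry.example.com"  # assumption: vars.HARBOR_REGISTRY
    HARBOR_PROJECT="plane"                  # assumption: vars.HARBOR_PROJECT

    TAG=makeplane/admin-enterprise:stable
    TAG=${TAG},makeplane/admin-enterprise:${RELEASE_TAG}
    TAG=${TAG},${HARBOR_REGISTRY}/${HARBOR_PROJECT}/admin-enterprise:stable
    TAG=${TAG},${HARBOR_REGISTRY}/${HARBOR_PROJECT}/admin-enterprise:${RELEASE_TAG}

    # Prints the comma-separated list the build step would push:
    # makeplane/admin-enterprise:stable,makeplane/admin-enterprise:v1.2.3,
    # registry.example.com/plane/admin-enterprise:stable,registry.example.com/plane/admin-enterprise:v1.2.3
    echo "$TAG"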
70  .github/workflows/create-release.yml  (vendored, new file)
@@ -0,0 +1,70 @@
name: Manual Release Workflow

on:
  workflow_dispatch:
    inputs:
      release_tag:
        description: 'Release Tag (e.g., v0.16-canary-1)'
        required: true
      prerelease:
        description: 'Pre-Release'
        required: true
        default: true
        type: boolean
      draft:
        description: 'Draft'
        required: true
        default: true
        type: boolean

permissions:
  contents: write

jobs:
  create-release:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0 # Necessary to fetch all history for tags

      - name: Set up Git
        run: |
          git config user.name "github-actions"
          git config user.email "github-actions@github.com"

      - name: Check for the Prerelease
        run: |
          echo ${{ github.event.release.prerelease }}

      - name: Generate Release Notes
        id: generate_notes
        run: |
          bash ./generate_release_notes.sh
          # Directly use the content of RELEASE_NOTES.md for the release body
          RELEASE_NOTES=$(cat RELEASE_NOTES.md)
          echo "RELEASE_NOTES<<EOF" >> $GITHUB_ENV
          echo "$RELEASE_NOTES" >> $GITHUB_ENV
          echo "EOF" >> $GITHUB_ENV

      - name: Create Tag
        run: |
          git tag ${{ github.event.inputs.release_tag }}
          git push origin ${{ github.event.inputs.release_tag }}

      - name: Create GitHub Release
        uses: softprops/action-gh-release@v1
        with:
          tag_name: ${{ github.event.inputs.release_tag }}
          body_path: RELEASE_NOTES.md
          draft: ${{ github.event.inputs.draft }}
          prerelease: ${{ github.event.inputs.prerelease }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
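Editor's note: the "Generate Release Notes" step uses GitHub Actions' heredoc syntax (`KEY<<DELIMITER`) because a plain `KEY=value` line written to $GITHUB_ENV cannot carry newlines; the notes would be truncated at the first line break. A minimal standalone sketch of the same pattern, simulating $GITHUB_ENV with a local file:

    #!/bin/bash
    # Simulate the Actions runner's env file to show the multiline-value syntax.
    GITHUB_ENV=$(mktemp)

    printf 'line one\nline two\n' > RELEASE_NOTES.md
    RELEASE_NOTES=$(cat RELEASE_NOTES.md)

    # KEY<<DELIMITER ... DELIMITER is how Actions accepts values containing newlines.
    echo "RELEASE_NOTES<<EOF" >> "$GITHUB_ENV"
    echo "$RELEASE_NOTES"     >> "$GITHUB_ENV"
    echo "EOF"                >> "$GITHUB_ENV"

    cat "$GITHUB_ENV"   # shows the delimited block a later step would read back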
@@ -4,3 +4,7 @@ export * from "./authentication-method-card";
 export * from "./gitlab-config";
 export * from "./github-config";
 export * from "./google-config";
+
+// enterprise
+export * from "./oidc-config";
+export * from "./saml-config";
72  admin/app/authentication/components/oidc-config.tsx  (new file)
@@ -0,0 +1,72 @@
"use client";

import React from "react";
import { observer } from "mobx-react-lite";
import Link from "next/link";
// icons
import { Settings2 } from "lucide-react";
// types
import { TInstanceEnterpriseAuthenticationMethodKeys } from "@plane/types";
// ui
import { ToggleSwitch, getButtonStyling } from "@plane/ui";
// helpers
import { cn } from "@/helpers/common.helper";
// hooks
import { useInstance } from "@/hooks/store";

type Props = {
  disabled: boolean;
  updateConfig: (key: TInstanceEnterpriseAuthenticationMethodKeys, value: string) => void;
};

export const OIDCConfiguration: React.FC<Props> = observer((props) => {
  const { disabled, updateConfig } = props;
  // store
  const { formattedConfig } = useInstance();
  // derived values
  const enableOIDCConfig = formattedConfig?.IS_OIDC_ENABLED ?? "";
  const isOIDCConfigured = !!formattedConfig?.OIDC_CLIENT_ID && !!formattedConfig?.OIDC_CLIENT_SECRET;

  return (
    <>
      {isOIDCConfigured ? (
        <div className="flex items-center gap-4">
          <Link href="/authentication/oidc" className={cn(getButtonStyling("link-primary", "md"), "font-medium")}>
            Edit
          </Link>
          <ToggleSwitch
            value={Boolean(parseInt(enableOIDCConfig))}
            onChange={() => {
              Boolean(parseInt(enableOIDCConfig)) === true
                ? updateConfig("IS_OIDC_ENABLED", "0")
                : updateConfig("IS_OIDC_ENABLED", "1");
            }}
            size="sm"
            disabled={disabled}
          />
        </div>
      ) : (
        <Link
          href="/authentication/oidc"
          className={cn(getButtonStyling("neutral-primary", "sm"), "text-custom-text-300")}
        >
          <Settings2 className="h-4 w-4 p-0.5 text-custom-text-300/80" />
          Configure
        </Link>
      )}
    </>
  );
});
72  admin/app/authentication/components/saml-config.tsx  (new file)
@@ -0,0 +1,72 @@
"use client";

import React from "react";
import { observer } from "mobx-react-lite";
import Link from "next/link";
// icons
import { Settings2 } from "lucide-react";
// types
import { TInstanceEnterpriseAuthenticationMethodKeys } from "@plane/types";
// ui
import { ToggleSwitch, getButtonStyling } from "@plane/ui";
// helpers
import { cn } from "@/helpers/common.helper";
// hooks
import { useInstance } from "@/hooks/store";

type Props = {
  disabled: boolean;
  updateConfig: (key: TInstanceEnterpriseAuthenticationMethodKeys, value: string) => void;
};

export const SAMLConfiguration: React.FC<Props> = observer((props) => {
  const { disabled, updateConfig } = props;
  // store
  const { formattedConfig } = useInstance();
  // derived values
  const enableSAMLConfig = formattedConfig?.IS_SAML_ENABLED ?? "";
  const isSAMLConfigured = !!formattedConfig?.SAML_ENTITY_ID && !!formattedConfig?.SAML_CERTIFICATE;

  return (
    <>
      {isSAMLConfigured ? (
        <div className="flex items-center gap-4">
          <Link href="/authentication/saml" className={cn(getButtonStyling("link-primary", "md"), "font-medium")}>
            Edit
          </Link>
          <ToggleSwitch
            value={Boolean(parseInt(enableSAMLConfig))}
            onChange={() => {
              Boolean(parseInt(enableSAMLConfig)) === true
                ? updateConfig("IS_SAML_ENABLED", "0")
                : updateConfig("IS_SAML_ENABLED", "1");
            }}
            size="sm"
            disabled={disabled}
          />
        </div>
      ) : (
        <Link
          href="/authentication/saml"
          className={cn(getButtonStyling("neutral-primary", "sm"), "text-custom-text-300")}
        >
          <Settings2 className="h-4 w-4 p-0.5 text-custom-text-300/80" />
          Configure
        </Link>
      )}
    </>
  );
});
222  admin/app/authentication/oidc/form.tsx  (new file)
@@ -0,0 +1,222 @@
import { FC, useState } from "react";
import Link from "next/link";
import { useForm } from "react-hook-form";
// types
import { IFormattedInstanceConfiguration, TInstanceOIDCAuthenticationConfigurationKeys } from "@plane/types";
// ui
import { Button, TOAST_TYPE, getButtonStyling, setToast } from "@plane/ui";
// components
import {
  ConfirmDiscardModal,
  ControllerInput,
  TControllerInputFormField,
  CopyField,
  TCopyField,
} from "@/components/common";
// helpers
import { cn } from "@/helpers/common.helper";
// hooks
import { useInstance } from "@/hooks/store";

type Props = {
  config: IFormattedInstanceConfiguration;
};

type OIDCConfigFormValues = Record<TInstanceOIDCAuthenticationConfigurationKeys, string>;

export const InstanceOIDCConfigForm: FC<Props> = (props) => {
  const { config } = props;
  // states
  const [isDiscardChangesModalOpen, setIsDiscardChangesModalOpen] = useState(false);
  // store hooks
  const { updateInstanceConfigurations } = useInstance();
  // form data
  const {
    handleSubmit,
    control,
    reset,
    formState: { errors, isDirty, isSubmitting },
  } = useForm<OIDCConfigFormValues>({
    defaultValues: {
      OIDC_CLIENT_ID: config["OIDC_CLIENT_ID"],
      OIDC_CLIENT_SECRET: config["OIDC_CLIENT_SECRET"],
      OIDC_TOKEN_URL: config["OIDC_TOKEN_URL"],
      OIDC_USERINFO_URL: config["OIDC_USERINFO_URL"],
      OIDC_AUTHORIZE_URL: config["OIDC_AUTHORIZE_URL"],
      OIDC_LOGOUT_URL: config["OIDC_LOGOUT_URL"],
      OIDC_PROVIDER_NAME: config["OIDC_PROVIDER_NAME"],
    },
  });

  const originURL = typeof window !== "undefined" ? window.location.origin : "";

  const OIDC_FORM_FIELDS: TControllerInputFormField[] = [
    {
      key: "OIDC_CLIENT_ID",
      type: "text",
      label: "Client ID",
      description: "A unique ID for this Plane app that you register on your IdP",
      placeholder: "abc123xyz789",
      error: Boolean(errors.OIDC_CLIENT_ID),
      required: true,
    },
    {
      key: "OIDC_CLIENT_SECRET",
      type: "password",
      label: "Client secret",
      description: "The secret key that authenticates this Plane app to your IdP",
      placeholder: "s3cr3tK3y123!",
      error: Boolean(errors.OIDC_CLIENT_SECRET),
      required: true,
    },
    {
      key: "OIDC_AUTHORIZE_URL",
      type: "text",
      label: "Authorize URL",
      description:
        "The URL that brings up your IdP's authentication screen when your users click `Sign in with <name of IdP>`",
      placeholder: "https://example.com/",
      error: Boolean(errors.OIDC_AUTHORIZE_URL),
      required: true,
    },
    {
      key: "OIDC_TOKEN_URL",
      type: "text",
      label: "Token URL",
      description: "The URL that talks to the IdP and persists user authentication on Plane",
      placeholder: "https://example.com/oauth/token",
      error: Boolean(errors.OIDC_TOKEN_URL),
      required: true,
    },
    {
      key: "OIDC_USERINFO_URL",
      type: "text",
      label: "Users' info URL",
      description: "The URL that fetches your users' info from your IdP",
      placeholder: "https://example.com/userinfo",
      error: Boolean(errors.OIDC_USERINFO_URL),
      required: true,
    },
    {
      key: "OIDC_LOGOUT_URL",
      type: "text",
      label: "Logout URL",
      description: "Optional field that controls where your users go after they log out of Plane",
      placeholder: "https://example.com/logout",
      error: Boolean(errors.OIDC_LOGOUT_URL),
      required: false,
    },
    {
      key: "OIDC_PROVIDER_NAME",
      type: "text",
      label: "IdP's name",
      description: "Optional field for the name that your users see on the `Sign in with` button",
      placeholder: "Okta",
      error: Boolean(errors.OIDC_PROVIDER_NAME),
      required: false,
    },
  ];

  const OIDC_SERVICE_DETAILS: TCopyField[] = [
    {
      key: "Origin_URI",
      label: "Origin URI",
      url: `${originURL}/auth/oidc/`,
      description: "We will generate this for this Plane app. Add this as a trusted origin on your IdP's corresponding field.",
    },
    {
      key: "Callback_URI",
      label: "Callback URI",
      url: `${originURL}/auth/oidc/callback/`,
      description: "We will generate this for you. Add this in the `Sign-in redirect URI` field of your IdP.",
    },
    {
      key: "Logout_URI",
      label: "Logout URI",
      url: `${originURL}/auth/oidc/logout/`,
      description: "We will generate this for you. Add this in the `Logout redirect URI` field of your IdP.",
    },
  ];

  const onSubmit = async (formData: OIDCConfigFormValues) => {
    const payload: Partial<OIDCConfigFormValues> = { ...formData };

    await updateInstanceConfigurations(payload)
      .then((response = []) => {
        setToast({
          type: TOAST_TYPE.SUCCESS,
          title: "Done!",
          message: "Your OIDC-based authentication is configured. You should test it now.",
        });
        reset({
          OIDC_CLIENT_ID: response.find((item) => item.key === "OIDC_CLIENT_ID")?.value,
          OIDC_CLIENT_SECRET: response.find((item) => item.key === "OIDC_CLIENT_SECRET")?.value,
          OIDC_AUTHORIZE_URL: response.find((item) => item.key === "OIDC_AUTHORIZE_URL")?.value,
          OIDC_TOKEN_URL: response.find((item) => item.key === "OIDC_TOKEN_URL")?.value,
          OIDC_USERINFO_URL: response.find((item) => item.key === "OIDC_USERINFO_URL")?.value,
          OIDC_LOGOUT_URL: response.find((item) => item.key === "OIDC_LOGOUT_URL")?.value,
          OIDC_PROVIDER_NAME: response.find((item) => item.key === "OIDC_PROVIDER_NAME")?.value,
        });
      })
      .catch((err) => console.error(err));
  };

  const handleGoBack = (e: React.MouseEvent<HTMLAnchorElement, MouseEvent>) => {
    if (isDirty) {
      e.preventDefault();
      setIsDiscardChangesModalOpen(true);
    }
  };

  return (
    <>
      <ConfirmDiscardModal
        isOpen={isDiscardChangesModalOpen}
        onDiscardHref="/authentication"
        handleClose={() => setIsDiscardChangesModalOpen(false)}
      />
      <div className="flex flex-col gap-8">
        <div className="grid grid-cols-2 gap-x-12 gap-y-8 w-full">
          <div className="flex flex-col gap-y-4 col-span-2 md:col-span-1">
            <div className="pt-2 text-xl font-medium">IdP-provided details for Plane</div>
            {OIDC_FORM_FIELDS.map((field) => (
              <ControllerInput
                key={field.key}
                control={control}
                type={field.type}
                name={field.key}
                label={field.label}
                description={field.description}
                placeholder={field.placeholder}
                error={field.error}
                required={field.required}
              />
            ))}
            <div className="flex flex-col gap-1 pt-4">
              <div className="flex items-center gap-4">
                <Button variant="primary" onClick={handleSubmit(onSubmit)} loading={isSubmitting} disabled={!isDirty}>
                  {isSubmitting ? "Saving..." : "Save changes"}
                </Button>
                <Link
                  href="/authentication"
                  className={cn(getButtonStyling("link-neutral", "md"), "font-medium")}
                  onClick={handleGoBack}
                >
                  Go back
                </Link>
              </div>
            </div>
          </div>
          <div className="col-span-2 md:col-span-1">
            <div className="flex flex-col gap-y-4 px-6 py-4 my-2 bg-custom-background-80/60 rounded-lg">
              <div className="pt-2 text-xl font-medium">Plane-provided details for your IdP</div>
              {OIDC_SERVICE_DETAILS.map((field) => (
                <CopyField key={field.key} label={field.label} url={field.url} description={field.description} />
              ))}
            </div>
          </div>
        </div>
      </div>
    </>
  );
};
102  admin/app/authentication/oidc/page.tsx  (new file)
@@ -0,0 +1,102 @@
"use client";

import { useState } from "react";
import Image from "next/image";
import { observer } from "mobx-react-lite";
import useSWR from "swr";
// hooks
import { useInstance } from "@/hooks/store";
// ui
import { Loader, ToggleSwitch, setPromiseToast } from "@plane/ui";
// components
import { PageHeader } from "@/components/core";
import { AuthenticationMethodCard } from "../components";
import { InstanceOIDCConfigForm } from "./form";
// icons
import OIDCLogo from "/public/logos/oidc-logo.png";

const InstanceOIDCAuthenticationPage = observer(() => {
  // store
  const { fetchInstanceConfigurations, formattedConfig, updateInstanceConfigurations } = useInstance();
  // state
  const [isSubmitting, setIsSubmitting] = useState<boolean>(false);
  // config
  const enableOIDCConfig = formattedConfig?.IS_OIDC_ENABLED ?? "";

  useSWR("INSTANCE_CONFIGURATIONS", () => fetchInstanceConfigurations());

  const updateConfig = async (key: "IS_OIDC_ENABLED", value: string) => {
    setIsSubmitting(true);

    const payload = {
      [key]: value,
    };

    const updateConfigPromise = updateInstanceConfigurations(payload);

    setPromiseToast(updateConfigPromise, {
      loading: "Saving Configuration...",
      success: {
        title: "Configuration saved",
        // value is the string "0" or "1", so compare explicitly rather than
        // relying on truthiness (a non-empty "0" is truthy in JS).
        message: () => `OIDC authentication is now ${value === "1" ? "active" : "disabled"}.`,
      },
      error: {
        title: "Error",
        message: () => "Failed to save configuration",
      },
    });

    await updateConfigPromise
      .then(() => {
        setIsSubmitting(false);
      })
      .catch((err) => {
        console.error(err);
        setIsSubmitting(false);
      });
  };

  return (
    <>
      <PageHeader title="Authentication - God Mode" />
      <div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
        <div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
          <AuthenticationMethodCard
            name="OIDC"
            description="Authenticate your users via the OpenID Connect protocol."
            icon={<Image src={OIDCLogo} height={24} width={24} alt="OIDC Logo" />}
            config={
              <ToggleSwitch
                value={Boolean(parseInt(enableOIDCConfig))}
                onChange={() => {
                  Boolean(parseInt(enableOIDCConfig)) === true
                    ? updateConfig("IS_OIDC_ENABLED", "0")
                    : updateConfig("IS_OIDC_ENABLED", "1");
                }}
                size="sm"
                disabled={isSubmitting || !formattedConfig}
              />
            }
            disabled={isSubmitting || !formattedConfig}
            withBorder={false}
          />
        </div>
        <div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md p-4">
          {formattedConfig ? (
            <InstanceOIDCConfigForm config={formattedConfig} />
          ) : (
            <Loader className="space-y-8">
              <Loader.Item height="50px" width="25%" />
              <Loader.Item height="50px" />
              <Loader.Item height="50px" />
              <Loader.Item height="50px" />
              <Loader.Item height="50px" />
              <Loader.Item height="50px" width="50%" />
            </Loader>
          )}
        </div>
      </div>
    </>
  );
});

export default InstanceOIDCAuthenticationPage;
@@ -10,7 +10,6 @@ import { TInstanceConfigurationKeys } from "@plane/types";
 import { Loader, ToggleSwitch, setPromiseToast } from "@plane/ui";
 // components
 import { PageHeader } from "@/components/core";
-// hooks
 // helpers
 import { cn, resolveGeneralTheme } from "@/helpers/common.helper";
 import { useInstance } from "@/hooks/store";
@@ -19,6 +18,9 @@ import githubLightModeImage from "@/public/logos/github-black.png";
 import githubDarkModeImage from "@/public/logos/github-white.png";
 import GitlabLogo from "@/public/logos/gitlab-logo.svg";
 import GoogleLogo from "@/public/logos/google-logo.svg";
+// images - enterprise
+import OIDCLogo from "@/public/logos/oidc-logo.png";
+import SAMLLogo from "@/public/logos/saml-logo.svg";
 // local components
 import {
   AuthenticationMethodCard,
@@ -27,6 +29,9 @@ import {
   GitlabConfiguration,
   GithubConfiguration,
   GoogleConfiguration,
+  // enterprise
+  OIDCConfiguration,
+  SAMLConfiguration,
 } from "./components";
 
 type TInstanceAuthenticationMethodCard = {
@@ -127,6 +132,24 @@ const InstanceAuthenticationPage = observer(() => {
     },
   ];
 
+  // Enterprise authentication methods
+  authenticationMethodsCard.push(
+    {
+      key: "oidc",
+      name: "OIDC",
+      description: "Authenticate your users via the OpenID Connect protocol.",
+      icon: <Image src={OIDCLogo} height={20} width={20} alt="OIDC Logo" />,
+      config: <OIDCConfiguration disabled={isSubmitting} updateConfig={updateConfig} />,
+    },
+    {
+      key: "saml",
+      name: "SAML",
+      description: "Authenticate your users via Security Assertion Markup Language protocol.",
+      icon: <Image src={SAMLLogo} height={24} width={24} alt="SAML Logo" className="pb-0.5 pl-0.5" />,
+      config: <SAMLConfiguration disabled={isSubmitting} updateConfig={updateConfig} />,
+    }
+  );
 
   return (
     <>
       <PageHeader title="Authentication - God Mode" />
226  admin/app/authentication/saml/form.tsx  (new file)
@@ -0,0 +1,226 @@
import { FC, useState } from "react";
import Link from "next/link";
import { Controller, useForm } from "react-hook-form";
// types
import { IFormattedInstanceConfiguration, TInstanceSAMLAuthenticationConfigurationKeys } from "@plane/types";
// ui
import { Button, TOAST_TYPE, TextArea, getButtonStyling, setToast } from "@plane/ui";
// components
import {
  ConfirmDiscardModal,
  ControllerInput,
  TControllerInputFormField,
  CopyField,
  TCopyField,
} from "@/components/common";
// helpers
import { cn } from "@/helpers/common.helper";
// hooks
import { useInstance } from "@/hooks/store";

type Props = {
  config: IFormattedInstanceConfiguration;
};

type SAMLConfigFormValues = Record<TInstanceSAMLAuthenticationConfigurationKeys, string>;

export const InstanceSAMLConfigForm: FC<Props> = (props) => {
  const { config } = props;
  // states
  const [isDiscardChangesModalOpen, setIsDiscardChangesModalOpen] = useState(false);
  // store hooks
  const { updateInstanceConfigurations } = useInstance();
  // form data
  const {
    handleSubmit,
    control,
    reset,
    formState: { errors, isDirty, isSubmitting },
  } = useForm<SAMLConfigFormValues>({
    defaultValues: {
      SAML_ENTITY_ID: config["SAML_ENTITY_ID"],
      SAML_SSO_URL: config["SAML_SSO_URL"],
      SAML_LOGOUT_URL: config["SAML_LOGOUT_URL"],
      SAML_CERTIFICATE: config["SAML_CERTIFICATE"],
      SAML_PROVIDER_NAME: config["SAML_PROVIDER_NAME"],
    },
  });

  const originURL = typeof window !== "undefined" ? window.location.origin : "";

  const SAML_FORM_FIELDS: TControllerInputFormField[] = [
    {
      key: "SAML_ENTITY_ID",
      type: "text",
      label: "Entity ID",
      description: "A unique ID for this Plane app that you register on your IdP",
      placeholder: "70a44354520df8bd9bcd",
      error: Boolean(errors.SAML_ENTITY_ID),
      required: true,
    },
    {
      key: "SAML_SSO_URL",
      type: "text",
      label: "SSO URL",
      description:
        "The URL that brings up your IdP's authentication screen when your users click `Sign in with <name of IdP>`",
      placeholder: "https://example.com/sso",
      error: Boolean(errors.SAML_SSO_URL),
      required: true,
    },
    {
      key: "SAML_LOGOUT_URL",
      type: "text",
      label: "Logout URL",
      description: "Optional field that tells your IdP your users have logged out of this Plane app",
      placeholder: "https://example.com/logout",
      error: Boolean(errors.SAML_LOGOUT_URL),
      required: false,
    },
    {
      key: "SAML_PROVIDER_NAME",
      type: "text",
      label: "IdP's name",
      description: "Optional field for the name that your users see on the `Sign in with` button",
      placeholder: "Okta",
      error: Boolean(errors.SAML_PROVIDER_NAME),
      required: false,
    },
  ];

  const SAML_SERVICE_DETAILS: TCopyField[] = [
    {
      key: "Metadata_Information",
      label: "Entity ID | Audience | Metadata information",
      url: `${originURL}/auth/saml/metadata/`,
      description:
        "We will generate this bit of the metadata that identifies this Plane app as an authorized service on your IdP.",
    },
    {
      key: "Callback_URI",
      label: "Callback URI",
      url: `${originURL}/auth/saml/callback/`,
      description:
        "We will generate this `http-post request` URL that you should paste into your `ACS URL` or `Sign-in call back URL` field on your IdP.",
    },
    {
      key: "Logout_URI",
      label: "Logout URI",
      url: `${originURL}/auth/saml/logout/`,
      description:
        "We will generate this `http-redirect request` URL that you should paste into your `SLS URL` or `Logout URL` field on your IdP.",
    },
  ];

  const onSubmit = async (formData: SAMLConfigFormValues) => {
    const payload: Partial<SAMLConfigFormValues> = { ...formData };

    await updateInstanceConfigurations(payload)
      .then((response = []) => {
        setToast({
          type: TOAST_TYPE.SUCCESS,
          title: "Done!",
          message: "Your SAML-based authentication is configured. You should test it now.",
        });
        reset({
          SAML_ENTITY_ID: response.find((item) => item.key === "SAML_ENTITY_ID")?.value,
          SAML_SSO_URL: response.find((item) => item.key === "SAML_SSO_URL")?.value,
          SAML_LOGOUT_URL: response.find((item) => item.key === "SAML_LOGOUT_URL")?.value,
          SAML_CERTIFICATE: response.find((item) => item.key === "SAML_CERTIFICATE")?.value,
          SAML_PROVIDER_NAME: response.find((item) => item.key === "SAML_PROVIDER_NAME")?.value,
        });
      })
      .catch((err) => console.error(err));
  };

  const handleGoBack = (e: React.MouseEvent<HTMLAnchorElement, MouseEvent>) => {
    if (isDirty) {
      e.preventDefault();
      setIsDiscardChangesModalOpen(true);
    }
  };

  return (
    <>
      <ConfirmDiscardModal
        isOpen={isDiscardChangesModalOpen}
        onDiscardHref="/authentication"
        handleClose={() => setIsDiscardChangesModalOpen(false)}
      />
      <div className="flex flex-col gap-8">
        <div className="grid grid-cols-2 gap-x-12 gap-y-8 w-full">
          <div className="flex flex-col gap-y-4 col-span-2 md:col-span-1">
            <div className="pt-2 text-xl font-medium">IdP-provided details for Plane</div>
            {SAML_FORM_FIELDS.map((field) => (
              <ControllerInput
                key={field.key}
                control={control}
                type={field.type}
                name={field.key}
                label={field.label}
                description={field.description}
                placeholder={field.placeholder}
                error={field.error}
                required={field.required}
              />
            ))}
            <div className="flex flex-col gap-1">
              <h4 className="text-sm">SAML certificate</h4>
              <Controller
                control={control}
                name="SAML_CERTIFICATE"
                rules={{ required: "Certificate is required." }}
                render={({ field: { value, onChange } }) => (
                  <TextArea
                    id="SAML_CERTIFICATE"
                    name="SAML_CERTIFICATE"
                    value={value}
                    onChange={onChange}
                    hasError={Boolean(errors.SAML_CERTIFICATE)}
                    placeholder="---BEGIN CERTIFICATE---\n2yWn1gc7DhOFB9\nr0gbE+\n---END CERTIFICATE---"
                    className="min-h-[102px] w-full rounded-md font-medium text-sm"
                  />
                )}
              />
              <p className="text-xs text-custom-text-400">
                IdP-generated certificate for signing this Plane app as an authorized service provider for your IdP
              </p>
            </div>
            <div className="flex flex-col gap-1 pt-4">
              <div className="flex items-center gap-4">
                <Button variant="primary" onClick={handleSubmit(onSubmit)} loading={isSubmitting} disabled={!isDirty}>
                  {isSubmitting ? "Saving..." : "Save changes"}
                </Button>
                <Link
                  href="/authentication"
                  className={cn(getButtonStyling("link-neutral", "md"), "font-medium")}
                  onClick={handleGoBack}
                >
                  Go back
                </Link>
              </div>
            </div>
          </div>
          <div className="col-span-2 md:col-span-1">
            <div className="flex flex-col gap-y-4 px-6 py-4 my-2 bg-custom-background-80/60 rounded-lg">
              <div className="pt-2 text-xl font-medium">Service provider details</div>
              {SAML_SERVICE_DETAILS.map((field) => (
                <CopyField key={field.key} label={field.label} url={field.url} description={field.description} />
              ))}
              <div className="flex flex-col gap-1">
                <h4 className="text-sm text-custom-text-200 font-medium">Name ID format</h4>
                <p className="text-sm text-custom-text-100">emailAddress</p>
              </div>
              <div className="flex flex-col gap-1">
                <h4 className="text-sm text-custom-text-200 font-medium">Attribute mapping</h4>
                <ul className="text-sm text-custom-text-100 list-disc pl-6">
                  <li>first_name to user.firstName</li>
                  <li>last_name to user.lastName</li>
                  <li>email to user.email</li>
                </ul>
              </div>
            </div>
          </div>
        </div>
      </div>
    </>
  );
};
102  admin/app/authentication/saml/page.tsx  (new file)
@@ -0,0 +1,102 @@
"use client";

import { useState } from "react";
import { observer } from "mobx-react-lite";
import Image from "next/image";
import useSWR from "swr";
// ui
import { Loader, ToggleSwitch, setPromiseToast } from "@plane/ui";
// components
import { PageHeader } from "@/components/core";
import { AuthenticationMethodCard } from "../components";
import { InstanceSAMLConfigForm } from "./form";
// hooks
import { useInstance } from "@/hooks/store";
// icons
import SAMLLogo from "/public/logos/saml-logo.svg";

const InstanceSAMLAuthenticationPage = observer(() => {
  // store
  const { fetchInstanceConfigurations, formattedConfig, updateInstanceConfigurations } = useInstance();
  // state
  const [isSubmitting, setIsSubmitting] = useState<boolean>(false);
  // config
  const enableSAMLConfig = formattedConfig?.IS_SAML_ENABLED ?? "";

  useSWR("INSTANCE_CONFIGURATIONS", () => fetchInstanceConfigurations());

  const updateConfig = async (key: "IS_SAML_ENABLED", value: string) => {
    setIsSubmitting(true);

    const payload = {
      [key]: value,
    };

    const updateConfigPromise = updateInstanceConfigurations(payload);

    setPromiseToast(updateConfigPromise, {
      loading: "Saving Configuration...",
      success: {
        title: "Configuration saved",
        // value is the string "0" or "1", so compare explicitly rather than
        // relying on truthiness (a non-empty "0" is truthy in JS).
        message: () => `SAML authentication is now ${value === "1" ? "active" : "disabled"}.`,
      },
      error: {
        title: "Error",
        message: () => "Failed to save configuration",
      },
    });

    await updateConfigPromise
      .then(() => {
        setIsSubmitting(false);
      })
      .catch((err) => {
        console.error(err);
        setIsSubmitting(false);
      });
  };

  return (
    <>
      <PageHeader title="Authentication - God Mode" />
      <div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
        <div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
          <AuthenticationMethodCard
            name="SAML"
            description="Authenticate your users via Security Assertion Markup Language protocol."
            icon={<Image src={SAMLLogo} height={26} width={26} alt="SAML Logo" className="pb-1 pl-0.5" />}
            config={
              <ToggleSwitch
                value={Boolean(parseInt(enableSAMLConfig))}
                onChange={() => {
                  Boolean(parseInt(enableSAMLConfig)) === true
                    ? updateConfig("IS_SAML_ENABLED", "0")
                    : updateConfig("IS_SAML_ENABLED", "1");
                }}
                size="sm"
                disabled={isSubmitting || !formattedConfig}
              />
            }
            disabled={isSubmitting || !formattedConfig}
            withBorder={false}
          />
        </div>
        <div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md p-4">
          {formattedConfig ? (
            <InstanceSAMLConfigForm config={formattedConfig} />
          ) : (
            <Loader className="space-y-8">
              <Loader.Item height="50px" width="25%" />
              <Loader.Item height="50px" />
              <Loader.Item height="50px" />
              <Loader.Item height="50px" />
              <Loader.Item height="50px" width="50%" />
            </Loader>
          )}
        </div>
      </div>
    </>
  );
});

export default InstanceSAMLAuthenticationPage;
BIN  admin/public/logos/oidc-logo.png  (new file; binary, 20 KiB, not shown)
16  admin/public/logos/saml-logo.svg  (new file)
@@ -0,0 +1,16 @@
<?xml version="1.0" encoding="utf-8"?>

<!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 16 16" xmlns="http://www.w3.org/2000/svg" fill="none">

<g fill="#C22E33">

<path d="M7.754 2l.463.41c.343.304.687.607 1.026.915C11.44 5.32 13.3 7.565 14.7 10.149c.072.132.137.268.202.403l.098.203-.108.057-.081-.115-.21-.299-.147-.214c-1.019-1.479-2.04-2.96-3.442-4.145a6.563 6.563 0 00-1.393-.904c-1.014-.485-1.916-.291-2.69.505-.736.757-1.118 1.697-1.463 2.653-.045.123-.092.245-.139.367l-.082.215-.172-.055c.1-.348.192-.698.284-1.049.21-.795.42-1.59.712-2.356.31-.816.702-1.603 1.093-2.39.169-.341.338-.682.5-1.025h.092z"/>

<path d="M8.448 11.822c-1.626.77-5.56 1.564-7.426 1.36C.717 11.576 3.71 4.05 5.18 2.91l-.095.218a4.638 4.638 0 01-.138.303l-.066.129c-.76 1.462-1.519 2.926-1.908 4.53a7.482 7.482 0 00-.228 1.689c-.01 1.34.824 2.252 2.217 2.309.67.027 1.347-.043 2.023-.114.294-.03.587-.061.88-.084.108-.008.214-.021.352-.039l.231-.028z"/>

<path d="M3.825 14.781c-.445.034-.89.068-1.333.108 4.097.39 8.03-.277 11.91-1.644-1.265-2.23-2.97-3.991-4.952-5.522.026.098.084.169.141.239l.048.06c.17.226.348.448.527.67.409.509.818 1.018 1.126 1.578.778 1.42.356 2.648-1.168 3.296-1.002.427-2.097.718-3.18.892-1.03.164-2.075.243-3.119.323z"/>

</g>

</svg>
@@ -20,7 +20,8 @@ RUN apk --no-cache add \
     "make~=4.3" \
     "postgresql13-dev~=13" \
     "libc-dev" \
-    "linux-headers"
+    "linux-headers" \
+    "xmlsec-dev"
 
 WORKDIR /code
 
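Editor's note: the diff does not say why xmlsec-dev is added, but a plausible reading is that the SAML support introduced elsewhere in this commit needs the native XML-signature headers to build a Python binding such as the `xmlsec` package. A minimal sketch of that idea on Alpine (the exact consumer package is an assumption):

    #!/bin/bash
    # Alpine: install native headers first, then the Python binding that
    # compiles against them; without xmlsec-dev the pip build step fails.
    apk add --no-cache xmlsec-dev libxml2-dev build-base python3-dev py3-pip
    pip install xmlsec   # hypothetical consumer; shown for illustration only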
18  apiserver/bin/docker-entrypoint-api-cloud.sh  (new executable file)
@@ -0,0 +1,18 @@
#!/bin/bash
set -e

export SKIP_ENV_VAR=0

python manage.py wait_for_db
# Wait for migrations
python manage.py wait_for_migrations

# Clear Cache before starting to remove stale values
python manage.py clear_cache

# Register instance if INSTANCE_ADMIN_EMAIL is set
if [ -n "$INSTANCE_ADMIN_EMAIL" ]; then
    python manage.py setup_instance "$INSTANCE_ADMIN_EMAIL"
fi

exec gunicorn -w "$GUNICORN_WORKERS" -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:"${PORT:-8000}" --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
39  apiserver/bin/docker-entrypoint-api-ee.sh  (new executable file)
@@ -0,0 +1,39 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
python manage.py wait_for_db
|
||||
# Wait for migrations
|
||||
python manage.py wait_for_migrations
|
||||
|
||||
# Create the default bucket
|
||||
#!/bin/bash
|
||||
|
||||
# Collect system information
|
||||
HOSTNAME=$(hostname)
|
||||
MAC_ADDRESS=$(ip link show | awk '/ether/ {print $2}' | head -n 1)
|
||||
CPU_INFO=$(cat /proc/cpuinfo)
|
||||
MEMORY_INFO=$(free -h)
|
||||
DISK_INFO=$(df -h)
|
||||
|
||||
# Concatenate information and compute SHA-256 hash
|
||||
SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256sum | awk '{print $1}')
|
||||
|
||||
# Export the variables
|
||||
MACHINE_SIGNATURE=${MACHINE_SIGNATURE:-$SIGNATURE}
|
||||
export SKIP_ENV_VAR=1
|
||||
|
||||
# License check
|
||||
python manage.py license_check
|
||||
|
||||
# Register instance
|
||||
python manage.py register_instance_ee "$MACHINE_SIGNATURE"
|
||||
|
||||
# Load the configuration variable
|
||||
python manage.py configure_instance
|
||||
|
||||
# Create the default bucket
|
||||
python manage.py create_bucket
|
||||
|
||||
# Clear Cache before starting to remove stale values
|
||||
python manage.py clear_cache
|
||||
|
||||
exec gunicorn -w "$GUNICORN_WORKERS" -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:"${PORT:-8000}" --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
|
||||
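Reviewer note: the machine signature above is just a SHA-256 digest of concatenated host facts. A minimal Python sketch of the same computation (it uses only the Linux tools the script itself shells out to; shell command substitution strips trailing newlines, so this may not match the shell pipeline byte-for-byte):

import hashlib
import subprocess

def machine_signature() -> str:
    # Recompute the entrypoint's signature: sha256 over
    # hostname + first MAC address + cpuinfo + memory info + disk info.
    hostname = subprocess.check_output(["hostname"], text=True).strip()
    link = subprocess.check_output(["ip", "link", "show"], text=True)
    mac = next(
        (ln.split()[1] for ln in link.splitlines() if "ether" in ln), ""
    )
    cpu_info = open("/proc/cpuinfo").read()
    memory_info = subprocess.check_output(["free", "-h"], text=True)
    disk_info = subprocess.check_output(["df", "-h"], text=True)
    payload = f"{hostname}{mac}{cpu_info}{memory_info}{disk_info}"
    return hashlib.sha256(payload.encode()).hexdigest()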
@@ -32,4 +32,4 @@ python manage.py create_bucket

# Clear Cache before starting to remove stale values
python manage.py clear_cache

exec gunicorn -w "$GUNICORN_WORKERS" -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:"${PORT:-8000}" --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
exec gunicorn -w "$GUNICORN_WORKERS" -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:"${PORT:-8000}" --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
@@ -17,7 +17,7 @@ from plane.api.serializers import (
    CycleSerializer,
)
from plane.app.permissions import ProjectEntityPermission
from plane.bgtasks.issue_activites_task import issue_activity
from plane.bgtasks.issue_activities_task import issue_activity
from plane.db.models import (
    Cycle,
    CycleIssue,
@@ -13,7 +13,7 @@ from rest_framework.response import Response

# Module imports
from plane.api.serializers import InboxIssueSerializer, IssueSerializer
from plane.app.permissions import ProjectLitePermission
from plane.bgtasks.issue_activites_task import issue_activity
from plane.bgtasks.issue_activities_task import issue_activity
from plane.db.models import (
    Inbox,
    InboxIssue,
@@ -36,7 +36,7 @@ from plane.app.permissions import (
    ProjectLitePermission,
    ProjectMemberPermission,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.bgtasks.issue_activities_task import issue_activity
from plane.db.models import (
    Issue,
    IssueActivity,
@@ -18,7 +18,7 @@ from plane.api.serializers import (
    ModuleSerializer,
)
from plane.app.permissions import ProjectEntityPermission
from plane.bgtasks.issue_activites_task import issue_activity
from plane.bgtasks.issue_activities_task import issue_activity
from plane.db.models import (
    Issue,
    IssueAttachment,
8
apiserver/plane/app/authentication/session.py
Normal file
@@ -0,0 +1,8 @@
from rest_framework.authentication import SessionAuthentication


class BaseSessionAuthentication(SessionAuthentication):

    # Disable csrf for the rest apis
    def enforce_csrf(self, request):
        return
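Reviewer note: overriding enforce_csrf with a no-op is the usual DRF way to run session authentication without CSRF checks. A hedged sketch of wiring it in (the settings key is standard DRF; the dotted path comes from the new file above, but where the project actually registers the class is not shown in this diff):

# settings.py (illustrative) - point DRF at the CSRF-exempt session class.
REST_FRAMEWORK = {
    "DEFAULT_AUTHENTICATION_CLASSES": (
        "plane.app.authentication.session.BaseSessionAuthentication",
    ),
}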
@@ -40,6 +40,9 @@ from .view import (
    GlobalViewSerializer,
    IssueViewSerializer,
)

from .active_cycle import ActiveCycleSerializer

from .cycle import (
    CycleSerializer,
    CycleIssueSerializer,
@@ -92,6 +95,8 @@ from .page import (
    PageLogSerializer,
    SubPageSerializer,
    PageDetailSerializer,
    WorkspacePageSerializer,
    WorkspacePageDetailSerializer,
)

from .estimate import (
@@ -121,3 +126,13 @@ from .exporter import ExporterHistorySerializer

from .webhook import WebhookSerializer, WebhookLogSerializer

from .dashboard import DashboardSerializer, WidgetSerializer

from .integration import (
    IntegrationSerializer,
    WorkspaceIntegrationSerializer,
    GithubIssueSyncSerializer,
    GithubRepositorySerializer,
    GithubRepositorySyncSerializer,
    GithubCommentSyncSerializer,
    SlackProjectSyncSerializer,
)
70
apiserver/plane/app/serializers/active_cycle.py
Normal file
@@ -0,0 +1,70 @@
# Third party imports
from rest_framework import serializers

# Module imports
from .base import BaseSerializer
from .project import ProjectLiteSerializer
from plane.db.models import (
    Cycle,
)


class ActiveCycleSerializer(BaseSerializer):
    # favorite
    is_favorite = serializers.BooleanField(read_only=True)
    total_issues = serializers.IntegerField(read_only=True)
    # state group wise distribution
    cancelled_issues = serializers.IntegerField(read_only=True)
    completed_issues = serializers.IntegerField(read_only=True)
    started_issues = serializers.IntegerField(read_only=True)
    unstarted_issues = serializers.IntegerField(read_only=True)
    backlog_issues = serializers.IntegerField(read_only=True)

    backlog_estimate_points = serializers.IntegerField(read_only=True)
    unstarted_estimate_points = serializers.IntegerField(read_only=True)
    started_estimate_points = serializers.IntegerField(read_only=True)
    cancelled_estimate_points = serializers.IntegerField(read_only=True)
    total_estimate_points = serializers.IntegerField(read_only=True)

    # active | draft | upcoming | completed
    status = serializers.CharField(read_only=True)

    # project details
    project_detail = ProjectLiteSerializer(read_only=True, source="project")

    class Meta:
        model = Cycle
        fields = [
            # necessary fields
            "id",
            "workspace_id",
            "project_id",
            # model fields
            "name",
            "description",
            "start_date",
            "end_date",
            "owned_by_id",
            "view_props",
            "sort_order",
            "external_source",
            "external_id",
            "progress_snapshot",
            # meta fields
            "is_favorite",
            "total_issues",
            "cancelled_issues",
            "completed_issues",
            "started_issues",
            "unstarted_issues",
            "backlog_issues",
            "status",
            "project_detail",
            "backlog_estimate_points",
            "unstarted_estimate_points",
            "started_estimate_points",
            "cancelled_estimate_points",
            "total_estimate_points",
        ]
        read_only_fields = fields
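Reviewer note: every field in ActiveCycleSerializer is read-only and most of them come from queryset annotations, so the serializer is output-only. An illustrative consequence (standard DRF behavior, not code from this diff):

from plane.app.serializers import ActiveCycleSerializer

serializer = ActiveCycleSerializer(data={"name": "Sprint 1"})
serializer.is_valid()      # True - there is nothing to validate
serializer.validated_data  # {} - all fields are read-only, none are writable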
@@ -3,6 +3,7 @@ from rest_framework import serializers

# Module imports
from .base import BaseSerializer

from .issue import IssueStateSerializer
from plane.db.models import (
    Cycle,
8
apiserver/plane/app/serializers/integration/__init__.py
Normal file
@@ -0,0 +1,8 @@
from .base import IntegrationSerializer, WorkspaceIntegrationSerializer
from .github import (
    GithubRepositorySerializer,
    GithubRepositorySyncSerializer,
    GithubIssueSyncSerializer,
    GithubCommentSyncSerializer,
)
from .slack import SlackProjectSyncSerializer
22
apiserver/plane/app/serializers/integration/base.py
Normal file
@@ -0,0 +1,22 @@
# Module imports
from plane.app.serializers import BaseSerializer
from plane.db.models import Integration, WorkspaceIntegration


class IntegrationSerializer(BaseSerializer):
    class Meta:
        model = Integration
        fields = "__all__"
        read_only_fields = [
            "verified",
        ]


class WorkspaceIntegrationSerializer(BaseSerializer):
    integration_detail = IntegrationSerializer(
        read_only=True, source="integration"
    )

    class Meta:
        model = WorkspaceIntegration
        fields = "__all__"
45
apiserver/plane/app/serializers/integration/github.py
Normal file
@@ -0,0 +1,45 @@
# Module imports
from plane.app.serializers import BaseSerializer
from plane.db.models import (
    GithubIssueSync,
    GithubRepository,
    GithubRepositorySync,
    GithubCommentSync,
)


class GithubRepositorySerializer(BaseSerializer):
    class Meta:
        model = GithubRepository
        fields = "__all__"


class GithubRepositorySyncSerializer(BaseSerializer):
    repo_detail = GithubRepositorySerializer(source="repository")

    class Meta:
        model = GithubRepositorySync
        fields = "__all__"


class GithubIssueSyncSerializer(BaseSerializer):
    class Meta:
        model = GithubIssueSync
        fields = "__all__"
        read_only_fields = [
            "project",
            "workspace",
            "repository_sync",
        ]


class GithubCommentSyncSerializer(BaseSerializer):
    class Meta:
        model = GithubCommentSync
        fields = "__all__"
        read_only_fields = [
            "project",
            "workspace",
            "repository_sync",
            "issue_sync",
        ]
14
apiserver/plane/app/serializers/integration/slack.py
Normal file
@@ -0,0 +1,14 @@
# Module imports
from plane.app.serializers import BaseSerializer
from plane.db.models import SlackProjectSync


class SlackProjectSyncSerializer(BaseSerializer):
    class Meta:
        model = SlackProjectSync
        fields = "__all__"
        read_only_fields = [
            "project",
            "workspace",
            "workspace_integration",
        ]
@@ -13,6 +13,155 @@ from plane.db.models import (
)


class WorkspacePageSerializer(BaseSerializer):
    is_favorite = serializers.BooleanField(read_only=True)
    labels = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
        write_only=True,
        required=False,
    )
    projects = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(
            queryset=Project.objects.all()
        ),
        write_only=True,
        required=False,
    )

    class Meta:
        model = Page
        fields = [
            "id",
            "name",
            "owned_by",
            "access",
            "color",
            "labels",
            "parent",
            "is_favorite",
            "is_locked",
            "archived_at",
            "workspace",
            "created_at",
            "updated_at",
            "created_by",
            "updated_by",
            "view_props",
            "logo_props",
            "projects",
        ]
        read_only_fields = [
            "workspace",
            "owned_by",
        ]

    def to_representation(self, instance):
        data = super().to_representation(instance)
        data["labels"] = [str(label.id) for label in instance.labels.all()]
        data["projects"] = [
            str(project.id) for project in instance.projects.all()
        ]
        return data

    def create(self, validated_data):
        labels = validated_data.pop("labels", None)
        projects = validated_data.pop("projects", None)
        owned_by_id = self.context["owned_by_id"]
        description_html = self.context["description_html"]
        workspace_id = self.context["workspace_id"]

        # Get the workspace id from the project
        page = Page.objects.create(
            **validated_data,
            description_html=description_html,
            owned_by_id=owned_by_id,
            workspace_id=workspace_id,
        )

        # Create the page labels
        if labels is not None:
            PageLabel.objects.bulk_create(
                [
                    PageLabel(
                        label=label,
                        page=page,
                        workspace_id=workspace_id,
                        created_by_id=page.created_by_id,
                        updated_by_id=page.updated_by_id,
                    )
                    for label in labels
                ],
                batch_size=10,
            )

        # Create the page-project links
        if projects is not None:
            ProjectPage.objects.bulk_create(
                [
                    ProjectPage(
                        workspace_id=page.workspace_id,
                        project_id=project,
                        page_id=page.id,
                        created_by_id=page.created_by_id,
                        updated_by_id=page.updated_by_id,
                    )
                    for project in projects
                ],
                batch_size=10,
            )

        return page

    def update(self, instance, validated_data):
        labels = validated_data.pop("labels", None)
        projects = validated_data.pop("projects", None)

        if projects is not None:
            ProjectPage.objects.filter(page=instance).delete()
            ProjectPage.objects.bulk_create(
                [
                    ProjectPage(
                        workspace_id=instance.workspace_id,
                        project_id=project,
                        page_id=instance.id,
                        created_by_id=instance.created_by_id,
                        updated_by_id=instance.updated_by_id,
                    )
                    for project in projects
                ],
                batch_size=10,
            )

        if labels is not None:
            PageLabel.objects.filter(page=instance).delete()
            PageLabel.objects.bulk_create(
                [
                    PageLabel(
                        label=label,
                        page=instance,
                        workspace_id=instance.workspace_id,
                        created_by_id=instance.created_by_id,
                        updated_by_id=instance.updated_by_id,
                    )
                    for label in labels
                ],
                batch_size=10,
            )

        return super().update(instance, validated_data)


class WorkspacePageDetailSerializer(BaseSerializer):
    description_html = serializers.CharField()
    is_favorite = serializers.BooleanField(read_only=True)

    class Meta(WorkspacePageSerializer.Meta):
        fields = WorkspacePageSerializer.Meta.fields + [
            "description_html",
        ]
        read_only_fields = fields


class PageSerializer(BaseSerializer):
    is_favorite = serializers.BooleanField(read_only=True)
    labels = serializers.ListField(
@@ -124,6 +273,7 @@ class PageSerializer(BaseSerializer):

class PageDetailSerializer(PageSerializer):
    description_html = serializers.CharField()
    is_favorite = serializers.BooleanField(read_only=True)

    class Meta(PageSerializer.Meta):
        fields = PageSerializer.Meta.fields + [
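Reviewer note: WorkspacePageSerializer.create() reads owned_by_id, description_html, and workspace_id from the serializer context, so any caller has to supply them. An illustrative call site (the real WorkspacePageViewSet is not part of this hunk; names here are assumptions):

serializer = WorkspacePageSerializer(
    data=request.data,
    context={
        "owned_by_id": request.user.id,
        "description_html": request.data.get("description_html", "<p></p>"),
        "workspace_id": workspace.id,  # assumed to be resolved from the slug
    },
)
serializer.is_valid(raise_exception=True)
page = serializer.save()  # also bulk-creates PageLabel / ProjectPage rows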
@@ -18,6 +18,15 @@ from .views import urlpatterns as view_urls
from .webhook import urlpatterns as webhook_urls
from .workspace import urlpatterns as workspace_urls

# Integrations URLS
from .importer import urlpatterns as importer_urls
from .integration import urlpatterns as integration_urls

from .active_cycle import urlpatterns as active_cycle_urls

# url patterns
from plane.ee.urls import urlpatterns as ee_urls

urlpatterns = [
    *analytic_urls,
    *asset_urls,
@@ -38,4 +47,9 @@ urlpatterns = [
    *workspace_urls,
    *api_urls,
    *webhook_urls,
    # ee
    *active_cycle_urls,
    *integration_urls,
    *importer_urls,
    *ee_urls,
]
13
apiserver/plane/app/urls/active_cycle.py
Normal file
@@ -0,0 +1,13 @@
from django.urls import path

from plane.app.views import (
    ActiveCycleEndpoint,
)

urlpatterns = [
    path(
        "workspaces/<str:slug>/active-cycles/",
        ActiveCycleEndpoint.as_view(),
        name="workspace-active-cycle",
    ),
]
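Reviewer note: the route is named, so it can be resolved with Django's reverse (ignoring whatever URL prefix the project mounts these patterns under):

from django.urls import reverse

url = reverse("workspace-active-cycle", kwargs={"slug": "my-workspace"})
# -> ".../workspaces/my-workspace/active-cycles/"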
43
apiserver/plane/app/urls/importer.py
Normal file
@@ -0,0 +1,43 @@
from django.urls import path


from plane.app.views import (
    ServiceIssueImportSummaryEndpoint,
    ImportServiceEndpoint,
    UpdateServiceImportStatusEndpoint,
    BulkImportIssuesEndpoint,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/importers/<str:service>/",
        ServiceIssueImportSummaryEndpoint.as_view(),
        name="importer-summary",
    ),
    path(
        "workspaces/<str:slug>/projects/importers/<str:service>/",
        ImportServiceEndpoint.as_view(),
        name="importer",
    ),
    path(
        "workspaces/<str:slug>/importers/",
        ImportServiceEndpoint.as_view(),
        name="importer",
    ),
    path(
        "workspaces/<str:slug>/importers/<str:service>/<uuid:pk>/",
        ImportServiceEndpoint.as_view(),
        name="importer",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/service/<str:service>/importers/<uuid:importer_id>/",
        UpdateServiceImportStatusEndpoint.as_view(),
        name="importer-status",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-issues/<str:service>/",
        BulkImportIssuesEndpoint.as_view(),
        name="bulk-import-issues",
    ),
]
150
apiserver/plane/app/urls/integration.py
Normal file
@@ -0,0 +1,150 @@
from django.urls import path


from plane.app.views import (
    IntegrationViewSet,
    WorkspaceIntegrationViewSet,
    GithubRepositoriesEndpoint,
    GithubRepositorySyncViewSet,
    GithubIssueSyncViewSet,
    GithubCommentSyncViewSet,
    BulkCreateGithubIssueSyncEndpoint,
    SlackProjectSyncViewSet,
)


urlpatterns = [
    path(
        "integrations/",
        IntegrationViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="integrations",
    ),
    path(
        "integrations/<uuid:pk>/",
        IntegrationViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="integrations",
    ),
    path(
        "workspaces/<str:slug>/workspace-integrations/",
        WorkspaceIntegrationViewSet.as_view(
            {
                "get": "list",
            }
        ),
        name="workspace-integrations",
    ),
    path(
        "workspaces/<str:slug>/workspace-integrations/<str:provider>/",
        WorkspaceIntegrationViewSet.as_view(
            {
                "post": "create",
            }
        ),
        name="workspace-integrations",
    ),
    path(
        "workspaces/<str:slug>/workspace-integrations/<uuid:pk>/provider/",
        WorkspaceIntegrationViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
        name="workspace-integrations",
    ),
    # Github Integrations
    path(
        "workspaces/<str:slug>/workspace-integrations/<uuid:workspace_integration_id>/github-repositories/",
        GithubRepositoriesEndpoint.as_view(),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/",
        GithubRepositorySyncViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/<uuid:pk>/",
        GithubRepositorySyncViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/",
        GithubIssueSyncViewSet.as_view(
            {
                "post": "create",
                "get": "list",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/bulk-create-github-issue-sync/",
        BulkCreateGithubIssueSyncEndpoint.as_view(),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:pk>/",
        GithubIssueSyncViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/",
        GithubCommentSyncViewSet.as_view(
            {
                "post": "create",
                "get": "list",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/<uuid:pk>/",
        GithubCommentSyncViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
    ),
    ## End Github Integrations
    # Slack Integration
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/",
        SlackProjectSyncViewSet.as_view(
            {
                "post": "create",
                "get": "list",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/<uuid:pk>/",
        SlackProjectSyncViewSet.as_view(
            {
                "delete": "destroy",
                "get": "retrieve",
            }
        ),
    ),
    ## End Slack Integration
]
@@ -19,8 +19,6 @@ from plane.app.views import (
    IssueUserDisplayPropertyEndpoint,
    IssueViewSet,
    LabelViewSet,
    BulkIssueOperationsEndpoint,
    BulkArchiveIssuesEndpoint,
)

urlpatterns = [
@@ -83,11 +81,6 @@ urlpatterns = [
        BulkDeleteIssuesEndpoint.as_view(),
        name="project-issues-bulk",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-archive-issues/",
        BulkArchiveIssuesEndpoint.as_view(),
        name="bulk-archive-issues",
    ),
    ##
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/sub-issues/",
@@ -305,9 +298,4 @@ urlpatterns = [
        ),
        name="project-issue-draft",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-operation-issues/",
        BulkIssueOperationsEndpoint.as_view(),
        name="bulk-operations-issues",
    ),
]
@@ -7,6 +7,8 @@ from plane.app.views import (
    PageLogEndpoint,
    SubPagesEndpoint,
    PagesDescriptionViewSet,
    WorkspacePageViewSet,
    WorkspacePagesDescriptionViewSet,
)


@@ -32,6 +34,27 @@ urlpatterns = [
        ),
        name="project-pages",
    ),
    path(
        "workspaces/<str:slug>/pages/",
        WorkspacePageViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="workspace-pages",
    ),
    path(
        "workspaces/<str:slug>/pages/<uuid:pk>/",
        WorkspacePageViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="workspace-pages",
    ),
    # favorite pages
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/favorite-pages/<uuid:pk>/",
@@ -54,16 +77,26 @@ urlpatterns = [
        ),
        name="project-page-archive-unarchive",
    ),
    path(
        "workspaces/<str:slug>/pages/<uuid:pk>/archive/",
        WorkspacePageViewSet.as_view(
            {
                "post": "archive",
                "delete": "unarchive",
            }
        ),
        name="workspace-page-archive-unarchive",
    ),
    # lock and unlock
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/lock/",
        PageViewSet.as_view(
        "workspaces/<str:slug>/pages/<uuid:pk>/lock/",
        WorkspacePageViewSet.as_view(
            {
                "post": "lock",
                "delete": "unlock",
            }
        ),
        name="project-pages-lock-unlock",
        name="workspace-pages-lock-unlock",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/transactions/",
@@ -90,4 +123,14 @@ urlpatterns = [
        ),
        name="page-description",
    ),
    path(
        "workspaces/<str:slug>/pages/<uuid:pk>/description/",
        WorkspacePagesDescriptionViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
            }
        ),
        name="page-description",
    ),
]
@@ -4,6 +4,9 @@ from django.urls import path
from plane.app.views import (
    GlobalSearchEndpoint,
    IssueSearchEndpoint,
    SearchEndpoint,
    WorkspaceSearchEndpoint,
    WorkspaceEntitySearchEndpoint,
)


@@ -18,4 +21,19 @@ urlpatterns = [
        IssueSearchEndpoint.as_view(),
        name="project-issue-search",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/search/",
        SearchEndpoint.as_view(),
        name="search",
    ),
    path(
        "workspaces/<str:slug>/app-search/",
        WorkspaceSearchEndpoint.as_view(),
        name="app-search",
    ),
    path(
        "workspaces/<str:slug>/entity-search/",
        WorkspaceEntitySearchEndpoint.as_view(),
        name="entity-search",
    ),
]
@@ -94,6 +94,7 @@ from .cycle.base import (
    CycleViewSet,
    TransferCycleIssueEndpoint,
)
from .cycle.active_cycle import ActiveCycleEndpoint
from .cycle.issue import (
    CycleIssueViewSet,
)
@@ -113,7 +114,7 @@ from .issue.activity import (
    IssueActivityEndpoint,
)

from .issue.archive import IssueArchiveViewSet, BulkArchiveIssuesEndpoint
from .issue.archive import IssueArchiveViewSet

from .issue.attachment import (
    IssueAttachmentEndpoint,
@@ -151,9 +152,6 @@ from .issue.subscriber import (
    IssueSubscriberViewSet,
)


from .issue.bulk_operations import BulkIssueOperationsEndpoint

from .module.base import (
    ModuleViewSet,
    ModuleLinkViewSet,
@@ -179,10 +177,21 @@ from .page.base import (
    SubPagesEndpoint,
    PagesDescriptionViewSet,
)
from .page.workspace import (
    WorkspacePageViewSet,
    WorkspacePagesDescriptionViewSet,
)
from .search.base import (
    GlobalSearchEndpoint,
    SearchEndpoint,
)

from .search.base import GlobalSearchEndpoint
from .search.issue import IssueSearchEndpoint

from .search.workspace import (
    WorkspaceSearchEndpoint,
    WorkspaceEntitySearchEndpoint,
)

from .external.base import (
    GPTIntegrationEndpoint,
@@ -224,6 +233,28 @@ from .dashboard.base import DashboardEndpoint, WidgetsEndpoint

from .error_404 import custom_404_view

from .importer.base import (
    ServiceIssueImportSummaryEndpoint,
    ImportServiceEndpoint,
    UpdateServiceImportStatusEndpoint,
    BulkImportIssuesEndpoint,
    BulkImportModulesEndpoint,
)

from .integration.base import (
    IntegrationViewSet,
    WorkspaceIntegrationViewSet,
)

from .integration.github import (
    GithubRepositoriesEndpoint,
    GithubRepositorySyncViewSet,
    GithubIssueSyncViewSet,
    GithubCommentSyncViewSet,
    BulkCreateGithubIssueSyncEndpoint,
)

from .integration.slack import SlackProjectSyncViewSet
from .exporter.base import ExportIssuesEndpoint
from .notification.base import MarkAllReadNotificationViewSet
from .user.base import AccountEndpoint, ProfileEndpoint, UserSessionEndpoint
478
apiserver/plane/app/views/cycle/active_cycle.py
Normal file
@@ -0,0 +1,478 @@
# Django imports
from django.db.models import (
    Case,
    CharField,
    Count,
    Exists,
    F,
    OuterRef,
    Prefetch,
    Q,
    Value,
    Sum,
    When,
    Subquery,
    IntegerField,
)
from django.db.models.functions import Cast, Coalesce
from django.utils import timezone


# Module imports
from plane.app.permissions import (
    WorkspaceUserPermission,
)
from plane.app.serializers import (
    ActiveCycleSerializer,
)
from plane.db.models import Cycle, CycleFavorite, Issue, Label, User, Project
from plane.utils.analytics_plot import burndown_plot
from plane.app.views.base import BaseAPIView


class ActiveCycleEndpoint(BaseAPIView):
    permission_classes = [
        WorkspaceUserPermission,
    ]

    def get_results_controller(self, results, plot_type, active_cycles=None):
        for cycle in results:
            estimate_type = Project.objects.filter(
                pk=cycle["project_id"],
                workspace__slug=self.kwargs.get("slug"),
                estimate__isnull=False,
                estimate__type="points",
            ).exists()
            cycle["estimate_distribution"] = {}
            if estimate_type:
                assignee_distribution = (
                    Issue.objects.filter(
                        issue_cycle__cycle_id=cycle["id"],
                        project_id=cycle["project_id"],
                        workspace__slug=self.kwargs.get("slug"),
                    )
                    .annotate(display_name=F("assignees__display_name"))
                    .annotate(assignee_id=F("assignees__id"))
                    .annotate(avatar=F("assignees__avatar"))
                    .values("display_name", "assignee_id", "avatar")
                    .annotate(
                        total_estimates=Sum(
                            Cast("estimate_point__value", IntegerField())
                        )
                    )
                    .annotate(
                        completed_estimates=Sum(
                            Cast("estimate_point__value", IntegerField()),
                            filter=Q(
                                completed_at__isnull=False,
                                archived_at__isnull=True,
                                is_draft=False,
                            ),
                        )
                    )
                    .annotate(
                        pending_estimates=Sum(
                            Cast("estimate_point__value", IntegerField()),
                            filter=Q(
                                completed_at__isnull=True,
                                archived_at__isnull=True,
                                is_draft=False,
                            ),
                        )
                    )
                    .order_by("display_name")
                )

                label_distribution = (
                    Issue.objects.filter(
                        issue_cycle__cycle_id=cycle["id"],
                        project_id=cycle["project_id"],
                        workspace__slug=self.kwargs.get("slug"),
                    )
                    .annotate(label_name=F("labels__name"))
                    .annotate(color=F("labels__color"))
                    .annotate(label_id=F("labels__id"))
                    .values("label_name", "color", "label_id")
                    .annotate(
                        total_estimates=Sum(
                            Cast("estimate_point__value", IntegerField())
                        )
                    )
                    .annotate(
                        completed_estimates=Sum(
                            Cast("estimate_point__value", IntegerField()),
                            filter=Q(
                                completed_at__isnull=False,
                                archived_at__isnull=True,
                                is_draft=False,
                            ),
                        )
                    )
                    .annotate(
                        pending_estimates=Sum(
                            Cast("estimate_point__value", IntegerField()),
                            filter=Q(
                                completed_at__isnull=True,
                                archived_at__isnull=True,
                                is_draft=False,
                            ),
                        )
                    )
                    .order_by("label_name")
                )
                cycle["estimate_distribution"] = {
                    "assignees": assignee_distribution,
                    "labels": label_distribution,
                    "completion_chart": {},
                }

                if cycle["start_date"] and cycle["end_date"]:
                    cycle["estimate_distribution"]["completion_chart"] = (
                        burndown_plot(
                            queryset=active_cycles.get(pk=cycle["id"]),
                            slug=self.kwargs.get("slug"),
                            project_id=cycle["project_id"],
                            cycle_id=cycle["id"],
                            plot_type="points",
                        )
                    )

            assignee_distribution = (
                Issue.issue_objects.filter(
                    issue_cycle__cycle_id=cycle["id"],
                    project_id=cycle["project_id"],
                    workspace__slug=self.kwargs.get("slug"),
                )
                .annotate(display_name=F("assignees__display_name"))
                .annotate(assignee_id=F("assignees__id"))
                .annotate(avatar=F("assignees__avatar"))
                .values("display_name", "assignee_id", "avatar")
                .annotate(
                    total_issues=Count(
                        "assignee_id",
                        filter=Q(archived_at__isnull=True, is_draft=False),
                    ),
                )
                .annotate(
                    completed_issues=Count(
                        "assignee_id",
                        filter=Q(
                            completed_at__isnull=False,
                            archived_at__isnull=True,
                            is_draft=False,
                        ),
                    )
                )
                .annotate(
                    pending_issues=Count(
                        "assignee_id",
                        filter=Q(
                            completed_at__isnull=True,
                            archived_at__isnull=True,
                            is_draft=False,
                        ),
                    )
                )
                .order_by("display_name")
            )

            label_distribution = (
                Issue.issue_objects.filter(
                    issue_cycle__cycle_id=cycle["id"],
                    project_id=cycle["project_id"],
                    workspace__slug=self.kwargs.get("slug"),
                )
                .annotate(label_name=F("labels__name"))
                .annotate(color=F("labels__color"))
                .annotate(label_id=F("labels__id"))
                .values("label_name", "color", "label_id")
                .annotate(
                    total_issues=Count(
                        "label_id",
                        filter=Q(archived_at__isnull=True, is_draft=False),
                    )
                )
                .annotate(
                    completed_issues=Count(
                        "label_id",
                        filter=Q(
                            completed_at__isnull=False,
                            archived_at__isnull=True,
                            is_draft=False,
                        ),
                    )
                )
                .annotate(
                    pending_issues=Count(
                        "label_id",
                        filter=Q(
                            completed_at__isnull=True,
                            archived_at__isnull=True,
                            is_draft=False,
                        ),
                    )
                )
                .order_by("label_name")
            )
            cycle["distribution"] = {
                "assignees": assignee_distribution,
                "labels": label_distribution,
                "completion_chart": {},
            }
            if cycle["start_date"] and cycle["end_date"]:
                cycle["distribution"]["completion_chart"] = burndown_plot(
                    queryset=active_cycles.get(pk=cycle["id"]),
                    slug=self.kwargs.get("slug"),
                    project_id=cycle["project_id"],
                    cycle_id=cycle["id"],
                    plot_type="issues",
                )
        return results

    def get(self, request, slug):
        plot_type = request.GET.get("plot_type", "issues")
        subquery = CycleFavorite.objects.filter(
            user=self.request.user,
            cycle_id=OuterRef("pk"),
            project_id=self.kwargs.get("project_id"),
            workspace__slug=self.kwargs.get("slug"),
        )
        backlog_estimate_point = (
            Issue.issue_objects.filter(
                estimate_point__estimate__type="points",
                state__group="backlog",
                issue_cycle__cycle_id=OuterRef("pk"),
            )
            .values("issue_cycle__cycle_id")
            .annotate(
                backlog_estimate_point=Sum(
                    Cast("estimate_point__value", IntegerField())
                )
            )
            .values("backlog_estimate_point")[:1]
        )
        unstarted_estimate_point = (
            Issue.issue_objects.filter(
                estimate_point__estimate__type="points",
                state__group="unstarted",
                issue_cycle__cycle_id=OuterRef("pk"),
            )
            .values("issue_cycle__cycle_id")
            .annotate(
                unstarted_estimate_point=Sum(
                    Cast("estimate_point__value", IntegerField())
                )
            )
            .values("unstarted_estimate_point")[:1]
        )
        started_estimate_point = (
            Issue.issue_objects.filter(
                estimate_point__estimate__type="points",
                state__group="started",
                issue_cycle__cycle_id=OuterRef("pk"),
            )
            .values("issue_cycle__cycle_id")
            .annotate(
                started_estimate_point=Sum(
                    Cast("estimate_point__value", IntegerField())
                )
            )
            .values("started_estimate_point")[:1]
        )
        cancelled_estimate_point = (
            Issue.issue_objects.filter(
                estimate_point__estimate__type="points",
                state__group="cancelled",
                issue_cycle__cycle_id=OuterRef("pk"),
            )
            .values("issue_cycle__cycle_id")
            .annotate(
                cancelled_estimate_point=Sum(
                    Cast("estimate_point__value", IntegerField())
                )
            )
            .values("cancelled_estimate_point")[:1]
        )
        completed_estimate_point = (
            Issue.issue_objects.filter(
                estimate_point__estimate__type="points",
                state__group="completed",
                issue_cycle__cycle_id=OuterRef("pk"),
            )
            .values("issue_cycle__cycle_id")
            .annotate(
                completed_estimate_points=Sum(
                    Cast("estimate_point__value", IntegerField())
                )
            )
            .values("completed_estimate_points")[:1]
        )
        total_estimate_point = (
            Issue.issue_objects.filter(
                estimate_point__estimate__type="points",
                issue_cycle__cycle_id=OuterRef("pk"),
            )
            .values("issue_cycle__cycle_id")
            .annotate(
                total_estimate_points=Sum(
                    Cast("estimate_point__value", IntegerField())
                )
            )
            .values("total_estimate_points")[:1]
        )
        active_cycles = (
            Cycle.objects.filter(
                workspace__slug=slug,
                project__project_projectmember__member=self.request.user,
                project__project_projectmember__is_active=True,
                start_date__lte=timezone.now(),
                end_date__gte=timezone.now(),
            )
            .select_related("project")
            .select_related("workspace")
            .select_related("owned_by")
            .annotate(is_favorite=Exists(subquery))
            .annotate(
                total_issues=Count(
                    "issue_cycle",
                    filter=Q(
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                completed_issues=Count(
                    "issue_cycle__issue__state__group",
                    filter=Q(
                        issue_cycle__issue__state__group="completed",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                cancelled_issues=Count(
                    "issue_cycle__issue__state__group",
                    filter=Q(
                        issue_cycle__issue__state__group="cancelled",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                started_issues=Count(
                    "issue_cycle__issue__state__group",
                    filter=Q(
                        issue_cycle__issue__state__group="started",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                unstarted_issues=Count(
                    "issue_cycle__issue__state__group",
                    filter=Q(
                        issue_cycle__issue__state__group="unstarted",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                backlog_issues=Count(
                    "issue_cycle__issue__state__group",
                    filter=Q(
                        issue_cycle__issue__state__group="backlog",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                status=Case(
                    When(
                        Q(start_date__lte=timezone.now())
                        & Q(end_date__gte=timezone.now()),
                        then=Value("CURRENT"),
                    ),
                    When(
                        start_date__gt=timezone.now(), then=Value("UPCOMING")
                    ),
                    When(end_date__lt=timezone.now(), then=Value("COMPLETED")),
                    When(
                        Q(start_date__isnull=True) & Q(end_date__isnull=True),
                        then=Value("DRAFT"),
                    ),
                    default=Value("DRAFT"),
                    output_field=CharField(),
                )
            )
            .annotate(
                backlog_estimate_points=Coalesce(
                    Subquery(backlog_estimate_point),
                    Value(0, output_field=IntegerField()),
                ),
            )
            .annotate(
                unstarted_estimate_points=Coalesce(
                    Subquery(unstarted_estimate_point),
                    Value(0, output_field=IntegerField()),
                ),
            )
            .annotate(
                started_estimate_points=Coalesce(
                    Subquery(started_estimate_point),
                    Value(0, output_field=IntegerField()),
                ),
            )
            .annotate(
                cancelled_estimate_points=Coalesce(
                    Subquery(cancelled_estimate_point),
                    Value(0, output_field=IntegerField()),
                ),
            )
            .annotate(
                completed_estimate_points=Coalesce(
                    Subquery(completed_estimate_point),
                    Value(0, output_field=IntegerField()),
                ),
            )
            .annotate(
                total_estimate_points=Coalesce(
                    Subquery(total_estimate_point),
                    Value(0, output_field=IntegerField()),
                ),
            )
            .prefetch_related(
                Prefetch(
                    "issue_cycle__issue__assignees",
                    queryset=User.objects.only(
                        "avatar", "first_name", "id"
                    ).distinct(),
                )
            )
            .prefetch_related(
                Prefetch(
                    "issue_cycle__issue__labels",
                    queryset=Label.objects.only(
                        "name", "color", "id"
                    ).distinct(),
                )
            )
            .order_by("-created_at")
        )

        return self.paginate(
            request=request,
            queryset=active_cycles,
            on_results=lambda active_cycles: ActiveCycleSerializer(
                active_cycles, many=True
            ).data,
            controller=lambda results: self.get_results_controller(
                results, plot_type, active_cycles
            ),
            default_per_page=int(request.GET.get("per_page", 3)),
        )
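Reviewer note: the five per-state estimate subqueries in get() differ only in state__group. A hedged refactoring sketch (not part of this diff) that factors out the repetition:

from django.db.models import IntegerField, OuterRef, Sum
from django.db.models.functions import Cast

from plane.db.models import Issue

def estimate_points_subquery(state_group=None):
    # Sum of integer-cast estimate points per cycle, optionally restricted
    # to one state group - mirrors the subqueries defined above.
    filters = {
        "estimate_point__estimate__type": "points",
        "issue_cycle__cycle_id": OuterRef("pk"),
    }
    if state_group is not None:
        filters["state__group"] = state_group
    return (
        Issue.issue_objects.filter(**filters)
        .values("issue_cycle__cycle_id")
        .annotate(points=Sum(Cast("estimate_point__value", IntegerField())))
        .values("points")[:1]
    )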
@@ -28,6 +28,7 @@ from django.core.serializers.json import DjangoJSONEncoder

# Third party imports
from rest_framework import status
from rest_framework.response import Response

from plane.app.permissions import (
    ProjectEntityPermission,
    ProjectLitePermission,
@@ -37,7 +38,7 @@ from plane.app.serializers import (
    CycleUserPropertiesSerializer,
    CycleWriteSerializer,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.bgtasks.issue_activities_task import issue_activity
from plane.db.models import (
    Cycle,
    CycleIssue,
@@ -20,12 +20,12 @@ from rest_framework.response import Response
from plane.app.permissions import (
    ProjectEntityPermission,
)

# Module imports
from .. import BaseViewSet
from plane.app.serializers import (
    CycleIssueSerializer,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.db.models import (
    Cycle,
    CycleIssue,
@@ -38,6 +38,7 @@ from plane.utils.grouper import (
    issue_on_results,
    issue_queryset_grouper,
)
from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import (
@@ -47,6 +48,7 @@ from plane.utils.paginator import (

# Module imports


class CycleIssueViewSet(BaseViewSet):
    serializer_class = CycleIssueSerializer
    model = CycleIssue
560
apiserver/plane/app/views/importer/base.py
Normal file
@@ -0,0 +1,560 @@
# Python imports
import uuid

# Third party imports
from rest_framework import status
from rest_framework.response import Response

# Django imports
from django.db.models import Max, Q

# Module imports
from plane.app.views import BaseAPIView
from plane.db.models import (
    WorkspaceIntegration,
    Importer,
    APIToken,
    Project,
    State,
    IssueSequence,
    Issue,
    IssueActivity,
    IssueComment,
    IssueLink,
    IssueLabel,
    Workspace,
    IssueAssignee,
    Module,
    ModuleLink,
    ModuleIssue,
    Label,
)
from plane.app.serializers import (
    ImporterSerializer,
    IssueFlatSerializer,
    ModuleSerializer,
)
from plane.utils.integrations.github import get_github_repo_details
from plane.utils.importers.jira import (
    jira_project_issue_summary,
    is_allowed_hostname,
)
from plane.bgtasks.importer_task import service_importer
from plane.utils.html_processor import strip_tags
from plane.app.permissions import WorkSpaceAdminPermission


class ServiceIssueImportSummaryEndpoint(BaseAPIView):
    def get(self, request, slug, service):
        if service == "github":
            owner = request.GET.get("owner", False)
            repo = request.GET.get("repo", False)

            if not owner or not repo:
                return Response(
                    {"error": "Owner and repo are required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            workspace_integration = WorkspaceIntegration.objects.get(
                integration__provider="github", workspace__slug=slug
            )

            access_tokens_url = workspace_integration.metadata.get(
                "access_tokens_url", False
            )

            if not access_tokens_url:
                return Response(
                    {
                        "error": "There was an error during the installation of the GitHub app. To resolve this issue, we recommend reinstalling the GitHub app."
                    },
                    status=status.HTTP_400_BAD_REQUEST,
                )

            issue_count, labels, collaborators = get_github_repo_details(
                access_tokens_url, owner, repo
            )
            return Response(
                {
                    "issue_count": issue_count,
                    "labels": labels,
                    "collaborators": collaborators,
                },
                status=status.HTTP_200_OK,
            )

        if service == "jira":
            # Check for all the keys
            params = {
                "project_key": "Project key is required",
                "api_token": "API token is required",
                "email": "Email is required",
                "cloud_hostname": "Cloud hostname is required",
            }

            for key, error_message in params.items():
                if not request.GET.get(key, False):
                    return Response(
                        {"error": error_message},
                        status=status.HTTP_400_BAD_REQUEST,
                    )

            project_key = request.GET.get("project_key", "")
            api_token = request.GET.get("api_token", "")
            email = request.GET.get("email", "")
            cloud_hostname = request.GET.get("cloud_hostname", "")

            response = jira_project_issue_summary(
                email, api_token, project_key, cloud_hostname
            )
            if "error" in response:
                return Response(response, status=status.HTTP_400_BAD_REQUEST)
            else:
                return Response(
                    response,
                    status=status.HTTP_200_OK,
                )
        return Response(
            {"error": "Service not supported yet"},
            status=status.HTTP_400_BAD_REQUEST,
        )


class ImportServiceEndpoint(BaseAPIView):
    permission_classes = [
        WorkSpaceAdminPermission,
    ]

    def post(self, request, slug, service):
        project_id = request.data.get("project_id", False)

        if not project_id:
            return Response(
                {"error": "Project ID is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        workspace = Workspace.objects.get(slug=slug)

        if service == "github":
            data = request.data.get("data", False)
            metadata = request.data.get("metadata", False)
            config = request.data.get("config", False)
            if not data or not metadata or not config:
                return Response(
                    {"error": "Data, config and metadata are required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            api_token = APIToken.objects.filter(
                user=request.user, workspace=workspace
            ).first()
            if api_token is None:
                api_token = APIToken.objects.create(
                    user=request.user,
                    label="Importer",
                    workspace=workspace,
                )

            importer = Importer.objects.create(
                service=service,
                project_id=project_id,
                status="queued",
                initiated_by=request.user,
                data=data,
                metadata=metadata,
                token=api_token,
                config=config,
                created_by=request.user,
                updated_by=request.user,
            )

            service_importer.delay(service, importer.id)
            serializer = ImporterSerializer(importer)
            return Response(serializer.data, status=status.HTTP_201_CREATED)

        if service == "jira":
            data = request.data.get("data", False)
            metadata = request.data.get("metadata", False)
            config = request.data.get("config", False)

            cloud_hostname = metadata.get("cloud_hostname", False)

            if not cloud_hostname:
                return Response(
                    {"error": "Cloud hostname is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            if not is_allowed_hostname(cloud_hostname):
                return Response(
                    {"error": "Hostname is not a valid hostname."},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            if not data or not metadata:
                return Response(
                    {"error": "Data, config and metadata are required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            api_token = APIToken.objects.filter(
                user=request.user, workspace=workspace
            ).first()
            if api_token is None:
                api_token = APIToken.objects.create(
                    user=request.user,
                    label="Importer",
                    workspace=workspace,
                )

            importer = Importer.objects.create(
                service=service,
                project_id=project_id,
                status="queued",
                initiated_by=request.user,
                data=data,
                metadata=metadata,
                token=api_token,
                config=config,
                created_by=request.user,
                updated_by=request.user,
            )

            service_importer.delay(service, importer.id)
            serializer = ImporterSerializer(importer)
            return Response(serializer.data, status=status.HTTP_201_CREATED)

        return Response(
            {"error": "Service not supported yet"},
            status=status.HTTP_400_BAD_REQUEST,
        )

    def get(self, request, slug):
        imports = (
            Importer.objects.filter(workspace__slug=slug)
            .order_by("-created_at")
            .select_related("initiated_by", "project", "workspace")
        )
        serializer = ImporterSerializer(imports, many=True)
        return Response(serializer.data)

    def delete(self, request, slug, service, pk):
        importer = Importer.objects.get(
            pk=pk, service=service, workspace__slug=slug
        )

        if importer.imported_data is not None:
            # Delete all imported Issues
            imported_issues = importer.imported_data.get("issues", [])
            Issue.issue_objects.filter(id__in=imported_issues).delete()

            # Delete all imported Labels
            imported_labels = importer.imported_data.get("labels", [])
            Label.objects.filter(id__in=imported_labels).delete()

            if importer.service == "jira":
                imported_modules = importer.imported_data.get("modules", [])
                Module.objects.filter(id__in=imported_modules).delete()
        importer.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)

    def patch(self, request, slug, service, pk):
        importer = Importer.objects.get(
            pk=pk, service=service, workspace__slug=slug
        )
        serializer = ImporterSerializer(
            importer, data=request.data, partial=True
        )
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)


class UpdateServiceImportStatusEndpoint(BaseAPIView):
    def post(self, request, slug, project_id, service, importer_id):
        importer = Importer.objects.get(
            pk=importer_id,
            workspace__slug=slug,
            project_id=project_id,
            service=service,
        )
        importer.status = request.data.get("status", "processing")
        importer.save()
        return Response(status=status.HTTP_200_OK)
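Reviewer note: pulling the required fields out of the checks above, a Jira import kickoff would POST roughly this body (values are illustrative; the shapes of data/config are whatever the importer task expects and are not defined in this diff):

payload = {
    "project_id": "<uuid of the target project>",  # required for every service
    "data": {"issues": []},   # must be truthy; exact shape is the task's concern
    "config": {},
    "metadata": {
        # must pass is_allowed_hostname()
        "cloud_hostname": "example.atlassian.net",
    },
}
# POST workspaces/<slug>/projects/importers/jira/  (route from the urls hunk above)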
class BulkImportIssuesEndpoint(BaseAPIView):
    def post(self, request, slug, project_id, service):
        # Get the project
        project = Project.objects.get(pk=project_id, workspace__slug=slug)

        # Get the default state
        default_state = State.objects.filter(
            ~Q(name="Triage"), project_id=project_id, default=True
        ).first()
        # if there is no default state assign any random state
        if default_state is None:
            default_state = State.objects.filter(
                ~Q(name="Triage"), project_id=project_id
            ).first()

        # Get the maximum sequence_id
        last_id = IssueSequence.objects.filter(
            project_id=project_id
        ).aggregate(largest=Max("sequence"))["largest"]

        last_id = 1 if last_id is None else last_id + 1

        # Get the maximum sort order
        largest_sort_order = Issue.objects.filter(
            project_id=project_id, state=default_state
        ).aggregate(largest=Max("sort_order"))["largest"]

        largest_sort_order = (
            65535 if largest_sort_order is None else largest_sort_order + 10000
        )

        # Get the issues_data
        issues_data = request.data.get("issues_data", [])

        if not len(issues_data):
            return Response(
                {"error": "Issue data is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Issues
        bulk_issues = []
        for issue_data in issues_data:
            bulk_issues.append(
                Issue(
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    state_id=(
                        issue_data.get("state")
                        if issue_data.get("state", False)
                        else default_state.id
                    ),
                    name=issue_data.get("name", "Issue Created through Bulk"),
                    description_html=issue_data.get(
                        "description_html", "<p></p>"
                    ),
                    description_stripped=(
                        None
                        if (
                            issue_data.get("description_html") == ""
                            or issue_data.get("description_html") is None
                        )
                        else strip_tags(issue_data.get("description_html"))
                    ),
                    sequence_id=last_id,
                    sort_order=largest_sort_order,
                    start_date=issue_data.get("start_date", None),
                    target_date=issue_data.get("target_date", None),
                    priority=issue_data.get("priority", "none"),
                    created_by=request.user,
                )
            )

            largest_sort_order = largest_sort_order + 10000
            last_id = last_id + 1

        issues = Issue.objects.bulk_create(
            bulk_issues,
            batch_size=100,
            ignore_conflicts=True,
        )

        # Sequences
        _ = IssueSequence.objects.bulk_create(
            [
                IssueSequence(
                    issue=issue,
                    sequence=issue.sequence_id,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                )
                for issue in issues
            ],
            batch_size=100,
        )

        # Attach Labels
        bulk_issue_labels = []
        for issue, issue_data in zip(issues, issues_data):
            labels_list = issue_data.get("labels_list", [])
            bulk_issue_labels = bulk_issue_labels + [
                IssueLabel(
                    issue=issue,
                    label_id=label_id,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for label_id in labels_list
            ]

        _ = IssueLabel.objects.bulk_create(
            bulk_issue_labels, batch_size=100, ignore_conflicts=True
        )

        # Attach Assignees
        bulk_issue_assignees = []
        for issue, issue_data in zip(issues, issues_data):
            assignees_list = issue_data.get("assignees_list", [])
            bulk_issue_assignees = bulk_issue_assignees + [
                IssueAssignee(
                    issue=issue,
                    assignee_id=assignee_id,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for assignee_id in assignees_list
            ]

        _ = IssueAssignee.objects.bulk_create(
            bulk_issue_assignees, batch_size=100, ignore_conflicts=True
        )

        # Track the issue activities
        IssueActivity.objects.bulk_create(
            [
                IssueActivity(
                    issue=issue,
                    actor=request.user,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    comment=f"imported the issue from {service}",
                    verb="created",
                    created_by=request.user,
                )
                for issue in issues
            ],
            batch_size=100,
        )

        # Create Comments
        bulk_issue_comments = []
        for issue, issue_data in zip(issues, issues_data):
            comments_list = issue_data.get("comments_list", [])
            bulk_issue_comments = bulk_issue_comments + [
                IssueComment(
                    issue=issue,
                    comment_html=comment.get("comment_html", "<p></p>"),
                    actor=request.user,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for comment in comments_list
            ]

        _ = IssueComment.objects.bulk_create(
            bulk_issue_comments, batch_size=100
        )

        # Attach Links
        _ = IssueLink.objects.bulk_create(
            [
                IssueLink(
                    issue=issue,
                    url=issue_data.get("link", {}).get(
                        "url", "https://github.com"
                    ),
                    title=issue_data.get("link", {}).get(
                        "title", "Original Issue"
                    ),
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for issue, issue_data in zip(issues, issues_data)
            ]
        )

        return Response(
            {"issues": IssueFlatSerializer(issues, many=True).data},
            status=status.HTTP_201_CREATED,
        )


class BulkImportModulesEndpoint(BaseAPIView):
    def post(self, request, slug, project_id, service):
        modules_data = request.data.get("modules_data", [])
        project = Project.objects.get(pk=project_id, workspace__slug=slug)

        modules = Module.objects.bulk_create(
            [
                Module(
                    name=module.get("name", uuid.uuid4().hex),
                    description=module.get("description", ""),
                    start_date=module.get("start_date", None),
                    target_date=module.get("target_date", None),
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for module in modules_data
            ],
            batch_size=100,
            ignore_conflicts=True,
        )

        modules = Module.objects.filter(
            id__in=[module.id for module in modules]
        )

        if len(modules) == len(modules_data):
            _ = ModuleLink.objects.bulk_create(
                [
                    ModuleLink(
                        module=module,
                        url=module_data.get("link", {}).get(
                            "url", "https://plane.so"
                        ),
                        title=module_data.get("link", {}).get(
                            "title", "Original Issue"
                        ),
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                        created_by=request.user,
                    )
                    for module, module_data in zip(modules, modules_data)
                ],
                batch_size=100,
                ignore_conflicts=True,
|
||||
)
|
||||
|
||||
bulk_module_issues = []
|
||||
for module, module_data in zip(modules, modules_data):
|
||||
module_issues_list = module_data.get("module_issues_list", [])
|
||||
bulk_module_issues = bulk_module_issues + [
|
||||
ModuleIssue(
|
||||
issue_id=issue,
|
||||
module=module,
|
||||
project_id=project_id,
|
||||
workspace_id=project.workspace_id,
|
||||
created_by=request.user,
|
||||
)
|
||||
for issue in module_issues_list
|
||||
]
|
||||
|
||||
_ = ModuleIssue.objects.bulk_create(
|
||||
bulk_module_issues, batch_size=100, ignore_conflicts=True
|
||||
)
|
||||
|
||||
serializer = ModuleSerializer(modules, many=True)
|
||||
return Response(
|
||||
{"modules": serializer.data}, status=status.HTTP_201_CREATED
|
||||
)
|
||||
|
||||
else:
|
||||
return Response(
|
||||
{
|
||||
"message": "Modules created but issues could not be imported"
|
||||
},
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
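For reference, a minimal sketch of the payload this bulk-import endpoint consumes, assembled from the keys read in the code above. The field names come straight from the view; the route itself is not shown in this diff, so the URL below is a hypothetical placeholder:

import requests

issues_data = [
    {
        "name": "Imported issue",
        "description_html": "<p>Imported from the external tracker</p>",
        "priority": "medium",
        "labels_list": [],        # label ids
        "assignees_list": [],     # user ids
        "comments_list": [],      # [{"comment_html": "<p>...</p>"}]
        "link": {"url": "https://github.com/org/repo/issues/1", "title": "Original Issue"},
    }
]
requests.post(
    "https://plane.example.com/api/workspaces/acme/projects/<project_id>/bulk-import-issues/github/",
    json={"issues_data": issues_data},
    cookies={"session-id": "<session>"},
)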
@@ -35,7 +35,7 @@ from plane.app.serializers import (
    InboxIssueDetailSerializer,
)
from plane.utils.issue_filters import issue_filters
from plane.bgtasks.issue_activites_task import issue_activity
from plane.bgtasks.issue_activities_task import issue_activity


class InboxViewSet(BaseViewSet):
9 apiserver/plane/app/views/integration/__init__.py Normal file
@@ -0,0 +1,9 @@
from .base import IntegrationViewSet, WorkspaceIntegrationViewSet
from .github import (
    GithubRepositorySyncViewSet,
    GithubIssueSyncViewSet,
    BulkCreateGithubIssueSyncEndpoint,
    GithubCommentSyncViewSet,
    GithubRepositoriesEndpoint,
)
from .slack import SlackProjectSyncViewSet
181 apiserver/plane/app/views/integration/base.py Normal file
@@ -0,0 +1,181 @@
# Python imports
import uuid

# Django imports
from django.contrib.auth.hashers import make_password

# Third party imports
from rest_framework.response import Response
from rest_framework import status

# Module imports
from plane.app.views import BaseViewSet
from plane.db.models import (
    Integration,
    WorkspaceIntegration,
    Workspace,
    User,
    WorkspaceMember,
    APIToken,
)
from plane.app.serializers import (
    IntegrationSerializer,
    WorkspaceIntegrationSerializer,
)
from plane.utils.integrations.github import (
    get_github_metadata,
    delete_github_installation,
)
from plane.app.permissions import WorkSpaceAdminPermission
from plane.utils.integrations.slack import slack_oauth


class IntegrationViewSet(BaseViewSet):
    serializer_class = IntegrationSerializer
    model = Integration

    def create(self, request):
        serializer = IntegrationSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def partial_update(self, request, pk):
        integration = Integration.objects.get(pk=pk)
        if integration.verified:
            return Response(
                {"error": "Verified integrations cannot be updated"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        serializer = IntegrationSerializer(
            integration, data=request.data, partial=True
        )

        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def destroy(self, request, pk):
        integration = Integration.objects.get(pk=pk)
        if integration.verified:
            return Response(
                {"error": "Verified integrations cannot be deleted"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        integration.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)


class WorkspaceIntegrationViewSet(BaseViewSet):
    serializer_class = WorkspaceIntegrationSerializer
    model = WorkspaceIntegration

    permission_classes = [
        WorkSpaceAdminPermission,
    ]

    def get_queryset(self):
        return (
            super()
            .get_queryset()
            .filter(workspace__slug=self.kwargs.get("slug"))
            .select_related("integration")
        )

    def create(self, request, slug, provider):
        workspace = Workspace.objects.get(slug=slug)
        integration = Integration.objects.get(provider=provider)
        config = {}
        metadata = {}  # populated per provider below
        if provider == "github":
            installation_id = request.data.get("installation_id", None)
            if not installation_id:
                return Response(
                    {"error": "Installation ID is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            metadata = get_github_metadata(installation_id)
            config = {"installation_id": installation_id}

        if provider == "slack":
            code = request.data.get("code", False)

            if not code:
                return Response(
                    {"error": "Code is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            slack_response = slack_oauth(code=code)

            metadata = slack_response
            access_token = metadata.get("access_token", False)
            team_id = metadata.get("team", {}).get("id", False)
            if not metadata or not access_token or not team_id:
                return Response(
                    {
                        "error": "Slack could not be installed. Please try again later"
                    },
                    status=status.HTTP_400_BAD_REQUEST,
                )
            config = {"team_id": team_id, "access_token": access_token}

        # Create a bot user
        bot_user = User.objects.create(
            email=f"{uuid.uuid4().hex}@plane.so",
            username=uuid.uuid4().hex,
            password=make_password(uuid.uuid4().hex),
            is_password_autoset=True,
            is_bot=True,
            first_name=integration.title,
            avatar=(
                integration.avatar_url
                if integration.avatar_url is not None
                else ""
            ),
        )

        # Create an API Token for the bot user
        api_token = APIToken.objects.create(
            user=bot_user,
            user_type=1,  # bot user
            workspace=workspace,
        )

        workspace_integration = WorkspaceIntegration.objects.create(
            workspace=workspace,
            integration=integration,
            actor=bot_user,
            api_token=api_token,
            metadata=metadata,
            config=config,
        )

        # Add bot user as a member of workspace
        _ = WorkspaceMember.objects.create(
            workspace=workspace_integration.workspace,
            member=bot_user,
            role=20,
        )
        return Response(
            WorkspaceIntegrationSerializer(workspace_integration).data,
            status=status.HTTP_201_CREATED,
        )

    def destroy(self, request, slug, pk):
        workspace_integration = WorkspaceIntegration.objects.get(
            pk=pk, workspace__slug=slug
        )

        if workspace_integration.integration.provider == "github":
            installation_id = workspace_integration.config.get(
                "installation_id", False
            )
            if installation_id:
                delete_github_installation(installation_id=installation_id)

        workspace_integration.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
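A sketch of installing an integration through this view, assuming a provider-scoped route (the URL pattern is not part of this diff, so the path below is hypothetical). For "github" the body carries the App installation_id; for "slack" it carries the OAuth code. On success the view provisions a bot user, an API token, and the WorkspaceIntegration row:

import requests

# Hypothetical URL and identifiers for illustration only.
requests.post(
    "https://plane.example.com/api/workspaces/acme/workspace-integrations/github/",
    json={"installation_id": "<github-app-installation-id>"},
    cookies={"session-id": "<session>"},
)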
201 apiserver/plane/app/views/integration/github.py Normal file
@@ -0,0 +1,201 @@
# Third party imports
from rest_framework import status
from rest_framework.response import Response

# Module imports
from plane.app.views import BaseViewSet, BaseAPIView
from plane.db.models import (
    GithubIssueSync,
    GithubRepositorySync,
    GithubRepository,
    WorkspaceIntegration,
    ProjectMember,
    Label,
    GithubCommentSync,
    Project,
)
from plane.app.serializers import (
    GithubIssueSyncSerializer,
    GithubRepositorySyncSerializer,
    GithubCommentSyncSerializer,
)
from plane.utils.integrations.github import get_github_repos
from plane.app.permissions import (
    ProjectBasePermission,
    ProjectEntityPermission,
)


class GithubRepositoriesEndpoint(BaseAPIView):
    permission_classes = [
        ProjectBasePermission,
    ]

    def get(self, request, slug, workspace_integration_id):
        page = request.GET.get("page", 1)
        workspace_integration = WorkspaceIntegration.objects.get(
            workspace__slug=slug, pk=workspace_integration_id
        )

        if workspace_integration.integration.provider != "github":
            return Response(
                {"error": "Not a github integration"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        access_tokens_url = workspace_integration.metadata["access_tokens_url"]
        repositories_url = (
            workspace_integration.metadata["repositories_url"]
            + f"?per_page=100&page={page}"
        )
        repositories = get_github_repos(access_tokens_url, repositories_url)
        return Response(repositories, status=status.HTTP_200_OK)


class GithubRepositorySyncViewSet(BaseViewSet):
    permission_classes = [
        ProjectBasePermission,
    ]

    serializer_class = GithubRepositorySyncSerializer
    model = GithubRepositorySync

    def perform_create(self, serializer):
        serializer.save(project_id=self.kwargs.get("project_id"))

    def get_queryset(self):
        return (
            super()
            .get_queryset()
            .filter(workspace__slug=self.kwargs.get("slug"))
            .filter(project_id=self.kwargs.get("project_id"))
        )

    def create(self, request, slug, project_id, workspace_integration_id):
        name = request.data.get("name", False)
        url = request.data.get("url", False)
        config = request.data.get("config", {})
        repository_id = request.data.get("repository_id", False)
        owner = request.data.get("owner", False)

        if not name or not url or not repository_id or not owner:
            return Response(
                {"error": "Name, url, repository_id and owner are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Get the workspace integration
        workspace_integration = WorkspaceIntegration.objects.get(
            pk=workspace_integration_id
        )

        # Delete the old repository object
        GithubRepositorySync.objects.filter(
            project_id=project_id, workspace__slug=slug
        ).delete()
        GithubRepository.objects.filter(
            project_id=project_id, workspace__slug=slug
        ).delete()

        # Create repository
        repo = GithubRepository.objects.create(
            name=name,
            url=url,
            config=config,
            repository_id=repository_id,
            owner=owner,
            project_id=project_id,
        )

        # Create a Label for github
        label = Label.objects.filter(
            name="GitHub",
            project_id=project_id,
        ).first()

        if label is None:
            label = Label.objects.create(
                name="GitHub",
                project_id=project_id,
                description="Label to sync Plane issues with GitHub issues",
                color="#003773",
            )

        # Create repo sync
        repo_sync = GithubRepositorySync.objects.create(
            repository=repo,
            workspace_integration=workspace_integration,
            actor=workspace_integration.actor,
            credentials=request.data.get("credentials", {}),
            project_id=project_id,
            label=label,
        )

        # Add bot as a member in the project
        _ = ProjectMember.objects.get_or_create(
            member=workspace_integration.actor, role=20, project_id=project_id
        )

        # Return Response
        return Response(
            GithubRepositorySyncSerializer(repo_sync).data,
            status=status.HTTP_201_CREATED,
        )


class GithubIssueSyncViewSet(BaseViewSet):
    permission_classes = [
        ProjectEntityPermission,
    ]

    serializer_class = GithubIssueSyncSerializer
    model = GithubIssueSync

    def perform_create(self, serializer):
        serializer.save(
            project_id=self.kwargs.get("project_id"),
            repository_sync_id=self.kwargs.get("repo_sync_id"),
        )


class BulkCreateGithubIssueSyncEndpoint(BaseAPIView):
    def post(self, request, slug, project_id, repo_sync_id):
        project = Project.objects.get(pk=project_id, workspace__slug=slug)

        github_issue_syncs = request.data.get("github_issue_syncs", [])
        github_issue_syncs = GithubIssueSync.objects.bulk_create(
            [
                GithubIssueSync(
                    issue_id=github_issue_sync.get("issue"),
                    repo_issue_id=github_issue_sync.get("repo_issue_id"),
                    issue_url=github_issue_sync.get("issue_url"),
                    github_issue_id=github_issue_sync.get("github_issue_id"),
                    repository_sync_id=repo_sync_id,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                    updated_by=request.user,
                )
                for github_issue_sync in github_issue_syncs
            ],
            batch_size=100,
            ignore_conflicts=True,
        )

        serializer = GithubIssueSyncSerializer(github_issue_syncs, many=True)
        return Response(serializer.data, status=status.HTTP_201_CREATED)


class GithubCommentSyncViewSet(BaseViewSet):
    permission_classes = [
        ProjectEntityPermission,
    ]

    serializer_class = GithubCommentSyncSerializer
    model = GithubCommentSync

    def perform_create(self, serializer):
        serializer.save(
            project_id=self.kwargs.get("project_id"),
            issue_sync_id=self.kwargs.get("issue_sync_id"),
        )
95 apiserver/plane/app/views/integration/slack.py Normal file
@@ -0,0 +1,95 @@
# Django import
from django.db import IntegrityError

# Third party imports
from rest_framework import status
from rest_framework.response import Response
from sentry_sdk import capture_exception

# Module imports
from plane.app.views import BaseViewSet
from plane.db.models import (
    SlackProjectSync,
    WorkspaceIntegration,
    ProjectMember,
)
from plane.app.serializers import SlackProjectSyncSerializer
from plane.app.permissions import (
    ProjectBasePermission,
)
from plane.utils.integrations.slack import slack_oauth


class SlackProjectSyncViewSet(BaseViewSet):
    permission_classes = [
        ProjectBasePermission,
    ]
    serializer_class = SlackProjectSyncSerializer
    model = SlackProjectSync

    def get_queryset(self):
        return (
            super()
            .get_queryset()
            .filter(
                workspace__slug=self.kwargs.get("slug"),
                project_id=self.kwargs.get("project_id"),
            )
            .filter(
                project__project_projectmember__member=self.request.user,
                project__project_projectmember__is_active=True,
            )
        )

    def create(self, request, slug, project_id, workspace_integration_id):
        try:
            code = request.data.get("code", False)

            if not code:
                return Response(
                    {"error": "Code is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            slack_response = slack_oauth(code=code)

            workspace_integration = WorkspaceIntegration.objects.get(
                pk=workspace_integration_id, workspace__slug=slug
            )
            slack_project_sync = SlackProjectSync.objects.create(
                access_token=slack_response.get("access_token"),
                scopes=slack_response.get("scope"),
                bot_user_id=slack_response.get("bot_user_id"),
                webhook_url=slack_response.get("incoming_webhook", {}).get(
                    "url"
                ),
                data=slack_response,
                team_id=slack_response.get("team", {}).get("id"),
                team_name=slack_response.get("team", {}).get("name"),
                workspace_integration=workspace_integration,
                project_id=project_id,
            )
            _ = ProjectMember.objects.get_or_create(
                member=workspace_integration.actor,
                role=20,
                project_id=project_id,
            )
            serializer = SlackProjectSyncSerializer(slack_project_sync)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except IntegrityError as e:
            if "already exists" in str(e):
                return Response(
                    {"error": "Slack is already installed for the project"},
                    status=status.HTTP_410_GONE,
                )
            capture_exception(e)
            return Response(
                {
                    "error": "Slack could not be installed. Please try again later"
                },
                status=status.HTTP_400_BAD_REQUEST,
            )
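For orientation, the fields the view reads off slack_response correspond to Slack's standard oauth.v2.access response; a representative shape (values are placeholders, not real tokens) looks like:

slack_response = {
    "access_token": "xoxb-...",                 # bot token stored on the sync row
    "scope": "chat:write,incoming-webhook",     # granted scopes
    "bot_user_id": "U0XXXXXXX",
    "team": {"id": "T0XXXXXXX", "name": "Acme"},
    "incoming_webhook": {"url": "https://hooks.slack.com/services/..."},
}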
@@ -19,15 +19,17 @@ from django.views.decorators.gzip import gzip_page
from rest_framework import status
from rest_framework.response import Response

# Module imports
from .. import BaseViewSet
from plane.app.serializers import (
    IssueSerializer,
    IssueFlatSerializer,
    IssueDetailSerializer,
)
from plane.app.permissions import (
    ProjectEntityPermission,
)
from plane.app.serializers import (
    IssueFlatSerializer,
    IssueSerializer,
    IssueDetailSerializer
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.bgtasks.issue_activities_task import issue_activity
from plane.db.models import (
    Issue,
    IssueAttachment,
@@ -47,8 +49,6 @@ from plane.utils.paginator import (
    SubGroupedOffsetPaginator,
)

# Module imports
from .. import BaseViewSet, BaseAPIView


class IssueArchiveViewSet(BaseViewSet):
@@ -318,57 +318,3 @@ class IssueArchiveViewSet(BaseViewSet):

        return Response(status=status.HTTP_204_NO_CONTENT)


class BulkArchiveIssuesEndpoint(BaseAPIView):
    permission_classes = [
        ProjectEntityPermission,
    ]

    def post(self, request, slug, project_id):
        issue_ids = request.data.get("issue_ids", [])

        if not len(issue_ids):
            return Response(
                {"error": "Issue IDs are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        issues = Issue.objects.filter(
            workspace__slug=slug, project_id=project_id, pk__in=issue_ids
        ).select_related("state")
        bulk_archive_issues = []
        for issue in issues:
            if issue.state.group not in ["completed", "cancelled"]:
                return Response(
                    {
                        "error_code": 4091,
                        "error_message": "INVALID_ARCHIVE_STATE_GROUP"
                    },
                    status=status.HTTP_400_BAD_REQUEST,
                )
            issue_activity.delay(
                type="issue.activity.updated",
                requested_data=json.dumps(
                    {
                        "archived_at": str(timezone.now().date()),
                        "automation": False,
                    }
                ),
                actor_id=str(request.user.id),
                issue_id=str(issue.id),
                project_id=str(project_id),
                current_instance=json.dumps(
                    IssueSerializer(issue).data, cls=DjangoJSONEncoder
                ),
                epoch=int(timezone.now().timestamp()),
                notification=True,
                origin=request.META.get("HTTP_ORIGIN"),
            )
            issue.archived_at = timezone.now().date()
            bulk_archive_issues.append(issue)
        Issue.objects.bulk_update(bulk_archive_issues, ["archived_at"])

        return Response(
            {"archived_at": str(timezone.now().date())},
            status=status.HTTP_200_OK,
        )
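A minimal sketch of driving the BulkArchiveIssuesEndpoint shown above, assuming the usual project-scoped route (the URL pattern is not part of this diff, so the path is hypothetical); only issues whose state group is "completed" or "cancelled" are accepted:

import requests

# Hypothetical workspace/project identifiers for illustration.
resp = requests.post(
    "https://plane.example.com/api/workspaces/acme/projects/<project_id>/bulk-archive-issues/",
    json={"issue_ids": ["<issue-uuid-1>", "<issue-uuid-2>"]},
    cookies={"session-id": "<session>"},
)
print(resp.status_code, resp.json())  # 200 with {"archived_at": "..."} on success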
@@ -15,7 +15,7 @@ from .. import BaseAPIView
from plane.app.serializers import IssueAttachmentSerializer
from plane.app.permissions import ProjectEntityPermission
from plane.db.models import IssueAttachment
from plane.bgtasks.issue_activites_task import issue_activity
from plane.bgtasks.issue_activities_task import issue_activity


class IssueAttachmentEndpoint(BaseAPIView):

@@ -35,7 +35,7 @@ from plane.app.serializers import (
    IssuePropertySerializer,
    IssueSerializer,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.bgtasks.issue_activities_task import issue_activity
from plane.db.models import (
    Issue,
    IssueAttachment,

@@ -22,7 +22,7 @@ from plane.db.models import (
    ProjectMember,
    CommentReaction,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.bgtasks.issue_activities_task import issue_activity


class IssueCommentViewSet(BaseViewSet):

@@ -32,7 +32,7 @@ from plane.app.serializers import (
    IssueFlatSerializer,
    IssueSerializer,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.bgtasks.issue_activities_task import issue_activity
from plane.db.models import (
    Issue,
    IssueAttachment,

@@ -14,7 +14,7 @@ from .. import BaseViewSet
from plane.app.serializers import IssueLinkSerializer
from plane.app.permissions import ProjectEntityPermission
from plane.db.models import IssueLink
from plane.bgtasks.issue_activites_task import issue_activity
from plane.bgtasks.issue_activities_task import issue_activity


class IssueLinkViewSet(BaseViewSet):

@@ -14,7 +14,7 @@ from .. import BaseViewSet
from plane.app.serializers import IssueReactionSerializer
from plane.app.permissions import ProjectLitePermission
from plane.db.models import IssueReaction
from plane.bgtasks.issue_activites_task import issue_activity
from plane.bgtasks.issue_activities_task import issue_activity


class IssueReactionViewSet(BaseViewSet):

@@ -21,7 +21,7 @@ from plane.db.models import (
    Project,
    IssueRelation,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.bgtasks.issue_activities_task import issue_activity


class IssueRelationViewSet(BaseViewSet):

@@ -30,7 +30,7 @@ from plane.db.models import (
    IssueLink,
    IssueAttachment,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.user_timezone_converter import user_timezone_converter
from collections import defaultdict

@@ -123,3 +123,4 @@ class IssueSubscriberViewSet(BaseViewSet):
        return Response(
            {"subscribed": issue_subscriber}, status=status.HTTP_200_OK
        )

@@ -38,7 +38,7 @@ from plane.app.serializers import (
    ModuleUserPropertiesSerializer,
    ModuleWriteSerializer,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.bgtasks.issue_activities_task import issue_activity
from plane.db.models import (
    Issue,
    Module,

@@ -23,7 +23,6 @@ from plane.app.permissions import (
from plane.app.serializers import (
    ModuleIssueSerializer,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.db.models import (
    Issue,
    IssueAttachment,
@@ -45,6 +44,22 @@ from plane.utils.paginator import (
    SubGroupedOffsetPaginator,
)

# Module imports
from .. import BaseViewSet
from plane.app.serializers import (
    ModuleIssueSerializer,
    IssueSerializer,
)
from plane.app.permissions import ProjectEntityPermission
from plane.db.models import (
    ModuleIssue,
    Project,
    Issue,
    IssueLink,
    IssueAttachment,
)
from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.issue_filters import issue_filters
from plane.utils.user_timezone_converter import user_timezone_converter


class ModuleIssueViewSet(BaseViewSet):
    serializer_class = ModuleIssueSerializer
@@ -250,7 +265,6 @@ class ModuleIssueViewSet(BaseViewSet):
        removed_modules = request.data.get("removed_modules", [])
        project = Project.objects.get(pk=project_id)


        if modules:
            _ = ModuleIssue.objects.bulk_create(
                [
345 apiserver/plane/app/views/page/workspace.py Normal file
@@ -0,0 +1,345 @@
# Python imports
import json
import base64
from datetime import datetime
from django.core.serializers.json import DjangoJSONEncoder

# Django imports
from django.db import connection
from django.db.models import Exists, OuterRef, Q
from django.http import StreamingHttpResponse

# Third party imports
from rest_framework import status
from rest_framework.response import Response


# Module imports
from plane.app.permissions import WorkspaceEntityPermission
from plane.app.serializers import (
    WorkspacePageSerializer,
    PageDetailSerializer,
    WorkspacePageDetailSerializer,
)
from plane.db.models import (
    Page,
    UserFavorite,
    ProjectMember,
    Workspace,
)

from ..base import BaseViewSet

from plane.bgtasks.page_transaction_task import page_transaction


def unarchive_archive_page_and_descendants(page_id, archived_at):
    # Recursive CTE: collect the page and every descendant page, then
    # set (or clear) archived_at on the whole subtree in one statement.
    sql = """
        WITH RECURSIVE descendants AS (
            SELECT id FROM pages WHERE id = %s
            UNION ALL
            SELECT pages.id FROM pages, descendants WHERE pages.parent_id = descendants.id
        )
        UPDATE pages SET archived_at = %s WHERE id IN (SELECT id FROM descendants);
    """

    # Execute the SQL query
    with connection.cursor() as cursor:
        cursor.execute(sql, [page_id, archived_at])
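# Illustrative usage (not part of the original file), assuming a page
# hierarchy A -> B -> C. Raw SQL is used because Django's ORM has no
# built-in recursive descendant query:
#
#   unarchive_archive_page_and_descendants(page_a.id, datetime.now())  # archives A, B, C
#   unarchive_archive_page_and_descendants(page_a.id, None)            # unarchives A, B, C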
class WorkspacePageViewSet(BaseViewSet):
    serializer_class = WorkspacePageSerializer
    model = Page
    permission_classes = [
        WorkspaceEntityPermission,
    ]
    search_fields = [
        "name",
    ]

    def get_queryset(self):
        subquery = UserFavorite.objects.filter(
            user=self.request.user,
            entity_type="page",
            entity_identifier=OuterRef("pk"),
            workspace__slug=self.kwargs.get("slug"),
        )
        return self.filter_queryset(
            super()
            .get_queryset()
            .filter(workspace__slug=self.kwargs.get("slug"))
            .filter(is_global=True)
            .filter(parent__isnull=True)
            .filter(Q(owned_by=self.request.user) | Q(access=0))
            .prefetch_related("projects")
            .select_related("workspace")
            .select_related("owned_by")
            .annotate(is_favorite=Exists(subquery))
            .order_by(self.request.GET.get("order_by", "-created_at"))
            .prefetch_related("labels")
            .order_by("-is_favorite", "-created_at")
            .distinct()
        )

    def create(self, request, slug):
        workspace = Workspace.objects.get(slug=slug)
        serializer = WorkspacePageSerializer(
            data=request.data,
            context={
                "owned_by_id": request.user.id,
                "description_html": request.data.get(
                    "description_html", "<p></p>"
                ),
                "workspace_id": workspace.id,
            },
        )

        if serializer.is_valid():
            serializer.save(is_global=True)
            # capture the page transaction
            page_transaction.delay(request.data, None, serializer.data["id"])
            page = Page.objects.get(pk=serializer.data["id"])
            serializer = PageDetailSerializer(page)
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def partial_update(self, request, slug, pk):
        try:
            page = Page.objects.get(
                pk=pk,
                workspace__slug=slug,
            )

            if page.is_locked:
                return Response(
                    {"error": "Page is locked"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            parent = request.data.get("parent", None)
            if parent:
                _ = Page.objects.get(
                    pk=parent,
                    workspace__slug=slug,
                )

            # Only update access if the page owner is the requesting user
            if (
                page.access != request.data.get("access", page.access)
                and page.owned_by_id != request.user.id
            ):
                return Response(
                    {
                        "error": "Access cannot be updated since this page is owned by someone else"
                    },
                    status=status.HTTP_400_BAD_REQUEST,
                )

            serializer = PageDetailSerializer(
                page, data=request.data, partial=True
            )
            page_description = page.description_html
            if serializer.is_valid():
                serializer.save()
                # capture the page transaction
                if request.data.get("description_html"):
                    page_transaction.delay(
                        new_value=request.data,
                        old_value=json.dumps(
                            {
                                "description_html": page_description,
                            },
                            cls=DjangoJSONEncoder,
                        ),
                        page_id=pk,
                    )

                return Response(serializer.data, status=status.HTTP_200_OK)
            return Response(
                serializer.errors, status=status.HTTP_400_BAD_REQUEST
            )
        except Page.DoesNotExist:
            return Response(
                {"error": "Page not found"},
                status=status.HTTP_404_NOT_FOUND,
            )

    def retrieve(self, request, slug, pk=None):
        page = self.get_queryset().filter(pk=pk).first()
        if page is None:
            return Response(
                {"error": "Page not found"},
                status=status.HTTP_404_NOT_FOUND,
            )
        else:
            return Response(
                WorkspacePageDetailSerializer(page).data,
                status=status.HTTP_200_OK,
            )

    def lock(self, request, slug, pk):
        page = Page.objects.filter(
            pk=pk,
            workspace__slug=slug,
        ).first()

        page.is_locked = True
        page.save()
        return Response(status=status.HTTP_204_NO_CONTENT)

    def unlock(self, request, slug, pk):
        page = Page.objects.filter(
            pk=pk,
            workspace__slug=slug,
        ).first()

        page.is_locked = False
        page.save()

        return Response(status=status.HTTP_204_NO_CONTENT)

    def list(self, request, slug):
        queryset = self.get_queryset()
        pages = WorkspacePageSerializer(queryset, many=True).data
        return Response(pages, status=status.HTTP_200_OK)

    def archive(self, request, slug, pk):
        page = Page.objects.get(
            pk=pk,
            workspace__slug=slug,
        )

        # only the owner or admin can archive the page
        if (
            ProjectMember.objects.filter(
                member=request.user,
                is_active=True,
                role__lte=15,
            ).exists()
            and request.user.id != page.owned_by_id
        ):
            return Response(
                {"error": "Only the owner or admin can archive the page"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        unarchive_archive_page_and_descendants(pk, datetime.now())

        return Response(
            {"archived_at": str(datetime.now())},
            status=status.HTTP_200_OK,
        )

    def unarchive(self, request, slug, pk):
        page = Page.objects.get(
            pk=pk,
            workspace__slug=slug,
        )

        # only the owner or admin can unarchive the page
        if (
            ProjectMember.objects.filter(
                member=request.user,
                is_active=True,
                role__lte=15,
            ).exists()
            and request.user.id != page.owned_by_id
        ):
            return Response(
                {"error": "Only the owner or admin can unarchive the page"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # if the parent page is archived, detach this page first so that
        # unarchiving it does not break the hierarchy
        if page.parent_id and page.parent.archived_at:
            page.parent = None
            page.save(update_fields=["parent"])

        unarchive_archive_page_and_descendants(pk, None)

        return Response(status=status.HTTP_204_NO_CONTENT)

    def destroy(self, request, slug, pk):
        page = Page.objects.get(
            pk=pk,
            workspace__slug=slug,
        )

        # only the owner and admin can delete the page
        if (
            ProjectMember.objects.filter(
                member=request.user,
                is_active=True,
                role__gt=20,
            ).exists()
            or request.user.id != page.owned_by_id
        ):
            return Response(
                {"error": "Only the owner and admin can delete the page"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        if page.archived_at is None:
            return Response(
                {"error": "The page should be archived before deleting"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # remove parent from all the children
        _ = Page.objects.filter(parent_id=pk, workspace__slug=slug).update(
            parent=None
        )

        page.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)


class WorkspacePagesDescriptionViewSet(BaseViewSet):
    permission_classes = [
        WorkspaceEntityPermission,
    ]

    def retrieve(self, request, slug, pk):
        page = Page.objects.get(
            pk=pk,
            workspace__slug=slug,
        )
        binary_data = page.description_binary

        def stream_data():
            if binary_data:
                yield binary_data
            else:
                yield b""

        response = StreamingHttpResponse(
            stream_data(), content_type="application/octet-stream"
        )
        response["Content-Disposition"] = (
            'attachment; filename="page_description.bin"'
        )
        return response

    def partial_update(self, request, slug, pk):
        page = Page.objects.get(
            pk=pk,
            workspace__slug=slug,
        )

        base64_data = request.data.get("description_binary")

        if base64_data:
            # Decode the base64 data to bytes
            new_binary_data = base64.b64decode(base64_data)

            # Store the updated binary data
            page.description_binary = new_binary_data
            page.description_html = request.data.get("description_html")
            page.save()
            return Response({"message": "Updated successfully"})
        else:
            return Response({"error": "No binary data provided"})
@@ -27,6 +27,7 @@ from plane.db.models import (
    WorkspaceMember,
    IssueProperty,
)
from plane.payment.bgtasks.member_sync_task import member_sync_task


class ProjectInvitationsViewset(BaseViewSet):
@@ -247,6 +248,9 @@ class ProjectJoinEndpoint(BaseAPIView):
            workspace_member.is_active = True
            workspace_member.save()

            # Sync workspace members
            member_sync_task.delay(slug)

        # Check if the user was already a member of project then activate the user
        project_member = ProjectMember.objects.filter(
            workspace_id=project_invite.workspace_id, member=user

@@ -22,6 +22,7 @@ from plane.db.models import (
    Page,
    IssueView,
    ProjectPage,
    ProjectMember,
)
@@ -260,3 +261,201 @@ class GlobalSearchEndpoint(BaseAPIView):
            func = MODELS_MAPPER.get(model, None)
            results[model] = func(query, slug, project_id, workspace_search)
        return Response({"results": results}, status=status.HTTP_200_OK)


class SearchEndpoint(BaseAPIView):
    def get(self, request, slug, project_id):
        query = request.query_params.get("query", False)
        query_type = request.query_params.get("query_type", "issue")
        count = int(request.query_params.get("count", 5))

        if query_type == "mention":
            fields = ["member__first_name", "member__last_name"]
            q = Q()

            if query:
                for field in fields:
                    q |= Q(**{f"{field}__icontains": query})
            users = (
                ProjectMember.objects.filter(
                    q,
                    project__project_projectmember__member=self.request.user,
                    project__project_projectmember__is_active=True,
                    project_id=project_id,
                    workspace__slug=slug,
                )
                .order_by("-created_at")
                .values(
                    "member__first_name",
                    "member__last_name",
                    "member__avatar",
                    "member__display_name",
                    "member__id",
                )[:count]
            )

            fields = ["name"]
            q = Q()

            if query:
                for field in fields:
                    q |= Q(**{f"{field}__icontains": query})

            pages = (
                Page.objects.filter(
                    q,
                    project__project_projectmember__member=self.request.user,
                    project__project_projectmember__is_active=True,
                    workspace__slug=slug,
                    access=0,
                )
                .order_by("-created_at")
                .values("name", "id")[:count]
            )
            return Response(
                {"users": users, "pages": pages}, status=status.HTTP_200_OK
            )

        if query_type == "project":
            fields = ["name", "identifier"]
            q = Q()

            if query:
                for field in fields:
                    q |= Q(**{f"{field}__icontains": query})
            projects = (
                Project.objects.filter(
                    q,
                    Q(project_projectmember__member=self.request.user)
                    | Q(network=2),
                    workspace__slug=slug,
                )
                .order_by("-created_at")
                .distinct()
                .values("name", "id", "identifier", "workspace__slug")[:count]
            )
            return Response(projects, status=status.HTTP_200_OK)

        if query_type == "issue":
            fields = ["name", "sequence_id", "project__identifier"]
            q = Q()

            if query:
                for field in fields:
                    if field == "sequence_id":
                        # Match whole integers only (exclude decimal numbers)
                        sequences = re.findall(r"\b\d+\b", query)
                        for sequence_id in sequences:
                            q |= Q(**{"sequence_id": sequence_id})
                    else:
                        q |= Q(**{f"{field}__icontains": query})

            issues = (
                Issue.issue_objects.filter(
                    q,
                    project__project_projectmember__member=self.request.user,
                    project__project_projectmember__is_active=True,
                    workspace__slug=slug,
                    project_id=project_id,
                )
                .order_by("-created_at")
                .distinct()
                .values(
                    "name",
                    "id",
                    "sequence_id",
                    "project__identifier",
                    "project_id",
                    "priority",
                    "state_id",
                )[:count]
            )
            return Response(issues, status=status.HTTP_200_OK)

        if query_type == "cycle":
            fields = ["name"]
            q = Q()

            if query:
                for field in fields:
                    q |= Q(**{f"{field}__icontains": query})

            cycles = (
                Cycle.objects.filter(
                    q,
                    project__project_projectmember__member=self.request.user,
                    project__project_projectmember__is_active=True,
                    workspace__slug=slug,
                )
                .order_by("-created_at")
                .distinct()
                .values(
                    "name",
                    "id",
                    "project_id",
                    "project__identifier",
                    "workspace__slug",
                )[:count]
            )
            return Response(cycles, status=status.HTTP_200_OK)

        if query_type == "module":
            fields = ["name"]
            q = Q()

            if query:
                for field in fields:
                    q |= Q(**{f"{field}__icontains": query})

            modules = (
                Module.objects.filter(
                    q,
                    project__project_projectmember__member=self.request.user,
                    project__project_projectmember__is_active=True,
                    workspace__slug=slug,
                )
                .order_by("-created_at")
                .distinct()
                .values(
                    "name",
                    "id",
                    "project_id",
                    "project__identifier",
                    "workspace__slug",
                )[:count]
            )
            return Response(modules, status=status.HTTP_200_OK)

        if query_type == "page":
            fields = ["name"]
            q = Q()

            if query:
                for field in fields:
                    q |= Q(**{f"{field}__icontains": query})

            pages = (
                Page.objects.filter(
                    q,
                    projects__project_projectmember__member=self.request.user,
                    projects__project_projectmember__is_active=True,
                    projects__id=project_id,
                    workspace__slug=slug,
                    access=0,
                )
                .order_by("-created_at")
                .distinct()
                .values(
                    "name",
                    "id",
                    "projects__id",
                    "project__identifier",
                    "workspace__slug",
                )[:count]
            )
            return Response(pages, status=status.HTTP_200_OK)

        return Response(
            {"error": "Please provide a valid query"},
            status=status.HTTP_400_BAD_REQUEST,
        )
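A minimal sketch of querying this endpoint, assuming the usual project-scoped route (the URL pattern itself is not part of this diff); query_type may be "mention", "project", "issue", "cycle", "module", or "page", and count caps the number of rows returned:

import requests

# Hypothetical identifiers for illustration only.
resp = requests.get(
    "https://plane.example.com/api/workspaces/acme/projects/<project_id>/search/",
    params={"query": "onboarding", "query_type": "page", "count": 5},
    cookies={"session-id": "<session>"},
)
print(resp.json())  # e.g. [{"name": "...", "id": "...", ...}]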
@@ -1,6 +1,3 @@
# Python imports
import re

# Django imports
from django.db.models import Q

@@ -11,13 +8,7 @@ from rest_framework.response import Response
# Module imports
from .base import BaseAPIView
from plane.db.models import (
    Workspace,
    Project,
    Issue,
    Cycle,
    Module,
    Page,
    IssueView,
)
from plane.utils.issue_search import search_issues
138 apiserver/plane/app/views/search/workspace.py Normal file
@@ -0,0 +1,138 @@
# Python imports
import re

# Django imports
from django.db.models import Q

# Third party imports
from rest_framework import status
from rest_framework.response import Response

# Module imports
from plane.app.views import BaseAPIView
from plane.db.models import Workspace, Page, Issue
from plane.app.permissions import WorkspaceEntityPermission


class WorkspaceSearchEndpoint(BaseAPIView):
    """Endpoint to search across multiple fields in the workspace and
    also show related workspace if found
    """

    permission_classes = [
        WorkspaceEntityPermission,
    ]

    def filter_workspaces(self, query, slug):
        """Filter workspaces based on the query"""
        fields = ["name"]
        q = Q()
        for field in fields:
            q |= Q(**{f"{field}__icontains": query})
        return (
            Workspace.objects.filter(
                q, workspace_member__member=self.request.user
            )
            .distinct()
            .values("name", "id", "slug")
        )

    def filter_pages(self, query, slug):
        """Filter pages based on the query"""
        fields = ["name"]
        q = Q()
        for field in fields:
            q |= Q(**{f"{field}__icontains": query})
        return (
            Page.objects.filter(
                q,
                workspace__slug=slug,
                archived_at__isnull=True,
                is_global=True,
            )
            .filter(
                Q(
                    owned_by=self.request.user,
                )
                | Q(access=0)
            )
            .distinct()
            .values("name", "id", "workspace__slug")
        )

    def get(self, request, slug):
        query = request.GET.get("search", False)
        if not query:
            return Response(
                {"error": "Search query is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        MODELS_MAPPER = {
            "workspace": self.filter_workspaces,
            "page": self.filter_pages,
        }

        results = {}

        for model in MODELS_MAPPER.keys():
            func = MODELS_MAPPER.get(model, None)
            results[model] = func(query, slug)
        return Response({"results": results}, status=status.HTTP_200_OK)


class WorkspaceEntitySearchEndpoint(BaseAPIView):

    permission_classes = [
        WorkspaceEntityPermission,
    ]

    def filter_issues(self, slug, query, count):
        """Filter issues based on the query"""
        fields = ["name", "sequence_id", "project__identifier"]
        q = Q()

        if query:
            for field in fields:
                if field == "sequence_id":
                    # Match whole integers only (exclude decimal numbers)
                    sequences = re.findall(r"\b\d+\b", query)
                    for sequence_id in sequences:
                        q |= Q(**{"sequence_id": sequence_id})
                else:
                    q |= Q(**{f"{field}__icontains": query})

        issues = (
            Issue.issue_objects.filter(
                q,
                project__project_projectmember__member=self.request.user,
                project__project_projectmember__is_active=True,
                workspace__slug=slug,
            )
            .order_by("-created_at")
            .distinct()
            .values(
                "name",
                "id",
                "sequence_id",
                "project__identifier",
                "project_id",
                "priority",
                "state_id",
            )[:count]
        )

        return issues

    def get(self, request, slug):
        query = request.query_params.get("query", False)
        query_type = request.query_params.get("query_type", "issue")
        count = int(request.query_params.get("count", 5))

        MODELS_MAPPER = {
            "issue": self.filter_issues,
        }

        func = MODELS_MAPPER.get(query_type, None)
        results = func(slug, query, count)
        return Response(results, status=status.HTTP_200_OK)
@@ -35,6 +35,7 @@ from plane.license.models import Instance, InstanceAdmin
from plane.utils.cache import cache_response, invalidate_cache
from plane.utils.paginator import BasePaginator
from plane.authentication.utils.host import user_ip
from plane.payment.bgtasks.member_sync_task import member_sync_task


class UserEndpoint(BaseViewSet):
@@ -159,6 +160,12 @@ class UserEndpoint(BaseViewSet):
            workspaces_to_deactivate, ["is_active"], batch_size=100
        )

        # Sync workspace members
        [
            member_sync_task.delay(workspace.workspace.slug)
            for workspace in workspaces_to_deactivate
        ]

        # Delete all workspace invites
        WorkspaceMemberInvite.objects.filter(
            email=user.email,

@@ -44,6 +44,7 @@ from plane.db.models import (
    WorkspaceTheme,
)
from plane.utils.cache import cache_response, invalidate_cache
from plane.payment.bgtasks.member_sync_task import member_sync_task


class WorkSpaceViewSet(BaseViewSet):
@@ -127,6 +128,9 @@ class WorkSpaceViewSet(BaseViewSet):
                role=20,
                company_role=request.data.get("company_role", ""),
            )

            # Sync workspace members
            member_sync_task.delay(slug)
            return Response(
                serializer.data, status=status.HTTP_201_CREATED
            )
@@ -1,6 +1,6 @@
# Python imports
from datetime import datetime

import uuid
import jwt

# Django imports
@@ -22,7 +22,7 @@ from plane.app.serializers import (
    WorkSpaceMemberSerializer,
)
from plane.app.views.base import BaseAPIView
from plane.bgtasks.event_tracking_task import workspace_invite_event
from plane.bgtasks.event_tracking_task import track_event
from plane.bgtasks.workspace_invitation_task import workspace_invitation
from plane.db.models import (
    User,
@@ -31,7 +31,7 @@ from plane.db.models import (
    WorkspaceMemberInvite,
)
from plane.utils.cache import invalidate_cache, invalidate_cache_directly

from plane.payment.bgtasks.member_sync_task import member_sync_task
from .. import BaseViewSet


@@ -227,15 +227,25 @@ class WorkspaceJoinEndpoint(BaseAPIView):
        workspace_invite.delete()

        # Send event
        workspace_invite_event.delay(
            user=user.id if user is not None else None,
        track_event.delay(
            email=email,
            user_agent=request.META.get("HTTP_USER_AGENT"),
            ip=request.META.get("REMOTE_ADDR"),
            event_name="MEMBER_ACCEPTED",
            accepted_from="EMAIL",
            properties={
                "event_id": uuid.uuid4().hex,
                "user": {"email": email, "id": str(user)},
                "device_ctx": {
                    "ip": request.META.get("REMOTE_ADDR", None),
                    "user_agent": request.META.get(
                        "HTTP_USER_AGENT", None
                    ),
                },
                "accepted_from": "EMAIL",
            },
        )

        # sync workspace members
        member_sync_task.delay(slug)

        return Response(
            {"message": "Workspace Invitation Accepted"},
            status=status.HTTP_200_OK,
@@ -308,6 +318,12 @@ class UserWorkspaceInvitationsViewSet(BaseViewSet):
            ignore_conflicts=True,
        )

        # Sync workspace members
        [
            member_sync_task.delay(invitation.workspace.slug)
            for invitation in workspace_invitations
        ]

        # Delete joined workspace invites
        workspace_invitations.delete()
@@ -35,7 +35,7 @@ from plane.db.models import (
    WorkspaceMember,
)
from plane.utils.cache import cache_response, invalidate_cache

from plane.payment.bgtasks.member_sync_task import member_sync_task
from .. import BaseViewSet


@@ -221,6 +221,10 @@ class WorkSpaceMemberViewSet(BaseViewSet):

        workspace_member.is_active = False
        workspace_member.save()

        # Sync workspace members
        member_sync_task.delay(slug)

        return Response(status=status.HTTP_204_NO_CONTENT)

    @invalidate_cache(
@@ -288,6 +292,9 @@ class WorkSpaceMemberViewSet(BaseViewSet):
        # # Deactivate the user
        workspace_member.is_active = False
        workspace_member.save()

        # # Sync workspace members
        member_sync_task.delay(slug)
        return Response(status=status.HTTP_204_NO_CONTENT)
@@ -40,6 +40,12 @@ AUTHENTICATION_ERROR_CODES = {
    "GOOGLE_OAUTH_PROVIDER_ERROR": 5115,
    "GITHUB_OAUTH_PROVIDER_ERROR": 5120,
    "GITLAB_OAUTH_PROVIDER_ERROR": 5121,
    # OIDC
    "OIDC_NOT_CONFIGURED": 5190,
    "OIDC_PROVIDER_ERROR": 5195,
    # SAML
    "SAML_NOT_CONFIGURED": 5190,
    "SAML_PROVIDER_ERROR": 5195,
    # Reset Password
    "INVALID_PASSWORD_TOKEN": 5125,
    "EXPIRED_PASSWORD_TOKEN": 5130,
@@ -47,7 +53,7 @@ AUTHENTICATION_ERROR_CODES = {
    "INCORRECT_OLD_PASSWORD": 5135,
    "MISSING_PASSWORD": 5138,
    "INVALID_NEW_PASSWORD": 5140,
    # set passowrd
    # set password
    "PASSWORD_ALREADY_SET": 5145,
    # Admin
    "ADMIN_ALREADY_EXIST": 5150,
209
apiserver/plane/authentication/adapter/saml.py
Normal file
209
apiserver/plane/authentication/adapter/saml.py
Normal file
@@ -0,0 +1,209 @@
# Python imports
import os

# Django imports
from django.conf import settings

# Third party imports
from onelogin.saml2.auth import OneLogin_Saml2_Auth

# Module imports
from plane.license.utils.instance_value import get_configuration_value
from .base import Adapter
from plane.authentication.adapter.error import (
    AuthenticationException,
    AUTHENTICATION_ERROR_CODES,
)


class SAMLAdapter(Adapter):

    provider = "saml"
    auth = None
    saml_config = {}

    def __init__(
        self,
        request,
    ):
        (
            SAML_ENTITY_ID,
            SAML_SSO_URL,
            SAML_LOGOUT_URL,
            SAML_CERTIFICATE,
        ) = get_configuration_value(
            [
                {
                    "key": "SAML_ENTITY_ID",
                    "default": os.environ.get("SAML_ENTITY_ID"),
                },
                {
                    "key": "SAML_SSO_URL",
                    "default": os.environ.get("SAML_SSO_URL"),
                },
                {
                    "key": "SAML_LOGOUT_URL",
                    "default": os.environ.get("SAML_LOGOUT_URL"),
                },
                {
                    "key": "SAML_CERTIFICATE",
                    "default": os.environ.get("SAML_CERTIFICATE"),
                },
            ]
        )

        if not (SAML_ENTITY_ID and SAML_SSO_URL and SAML_CERTIFICATE):
            raise AuthenticationException(
                error_code=AUTHENTICATION_ERROR_CODES["SAML_NOT_CONFIGURED"],
                error_message="SAML_NOT_CONFIGURED",
            )

        super().__init__(request, self.provider)
        req = self.prepare_saml_request(self.request)
        saml_config = self.generate_saml_configuration(
            request=request,
            entity_id=SAML_ENTITY_ID,
            sso_url=SAML_SSO_URL,
            logout_url=SAML_LOGOUT_URL,
            idp_certificate=SAML_CERTIFICATE,
        )

        # Generate configuration
        self.saml_config = saml_config
        auth = OneLogin_Saml2_Auth(
            req,
            saml_config,
        )
        self.auth = auth

    def generate_saml_configuration(
        self,
        request,
        entity_id,
        sso_url,
        logout_url,
        idp_certificate,
    ):
        return {
            "strict": True,
            "debug": settings.DEBUG,
            "sp": {
                "entityId": f"{request.scheme}://{request.get_host()}/auth/saml/metadata/",
                "assertionConsumerService": {
                    "url": f"{request.scheme}://{request.get_host()}/auth/saml/callback/",
                    "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST",
                },
            },
            "idp": {
                "entityId": entity_id,
                "singleSignOnService": {
                    "url": sso_url,
                    "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect",
                },
                "singleLogoutService": {
                    "url": logout_url,
                    "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect",
                },
                "x509cert": idp_certificate,
            },
            "attributeConsumingService": {
                "serviceName": "Plane SAML",
                "serviceDescription": "Plane SAML",
                "requestedAttributes": [
                    {
                        "name": "first_name",
                        "friendlyName": "user.firstName",
                        "isRequired": False,
                        "nameFormat": "urn:oasis:names:tc:SAML:2.0:attrname-format:basic",
                    },
                    {
                        "name": "last_name",
                        "friendlyName": "user.lastName",
                        "isRequired": False,
                        "nameFormat": "urn:oasis:names:tc:SAML:2.0:attrname-format:basic",
                    },
                    {
                        "name": "email",
                        "friendlyName": "user.email",
                        "isRequired": True,
                        "nameFormat": "urn:oasis:names:tc:SAML:2.0:attrname-format:basic",
                    },
                ],
            },
        }

    def prepare_saml_request(self, request):
        return {
            "https": "on" if request.is_secure() else "off",
            "http_host": request.get_host(),
            "script_name": request.path,
            "get_data": request.GET.copy(),
            "post_data": request.POST.copy(),
        }

    def get_auth_url(self):
        return self.auth.login()
    def authenticate(self):
        self.auth.process_response()
        errors = self.auth.get_errors()
        if errors:
            if not self.auth.is_authenticated():
                raise AuthenticationException(
                    error_code=AUTHENTICATION_ERROR_CODES[
                        "SAML_PROVIDER_ERROR"
                    ],
                    error_message="SAML_PROVIDER_ERROR",
                )
            raise AuthenticationException(
                error_code=AUTHENTICATION_ERROR_CODES[
                    "SAML_PROVIDER_ERROR"
                ],
                error_message="SAML_PROVIDER_ERROR",
            )
        attributes = self.auth.get_attributes()

        email = (
            attributes.get("email")[0]
            if attributes.get("email") and len(attributes.get("email"))
            else None
        )

        if not email:
            raise AuthenticationException(
                error_code=AUTHENTICATION_ERROR_CODES[
                    "SAML_PROVIDER_ERROR"
                ],
                error_message="SAML_PROVIDER_ERROR",
            )

        first_name = (
            attributes.get("first_name")[0]
            if attributes.get("first_name")
            and len(attributes.get("first_name"))
            else ""
        )

        last_name = (
            attributes.get("last_name")[0]
            if attributes.get("last_name") and len(attributes.get("last_name"))
            else ""
        )

        super().set_user_data(
            {
                "email": email,
                "user": {
                    "first_name": first_name,
                    "last_name": last_name,
                    "email": email,
                },
            }
        )
        return self.complete_login_or_signup()

    def logout(self):
        try:
            return self.auth.logout()
        except Exception:
            return False
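Reviewer note: a minimal sketch of the configuration the new SAMLAdapter expects; all values below are illustrative placeholders, read through get_configuration_value() (instance configuration first, environment second). Only the entity ID, SSO URL, and certificate are mandatory; the logout URL is only needed for single logout.

# Hypothetical local setup for exercising SAMLAdapter; the endpoints and the
# certificate string are placeholders, not real values.
import os

os.environ.setdefault("SAML_ENTITY_ID", "https://idp.example.com/metadata")
os.environ.setdefault("SAML_SSO_URL", "https://idp.example.com/sso")
os.environ.setdefault("SAML_LOGOUT_URL", "https://idp.example.com/slo")  # optional
os.environ.setdefault("SAML_CERTIFICATE", "MIIC...base64-encoded-x509...")

# Inside a Django view the flow then mirrors SAMLAuthInitiateEndpoint below:
#   provider = SAMLAdapter(request=request)
#   return HttpResponseRedirect(provider.get_auth_url())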
@@ -6,6 +6,9 @@ from urllib.parse import urlencode
 import pytz
 import requests

+# Django imports
+from django.conf import settings
+
 # Module imports
 from plane.authentication.adapter.oauth import OauthAdapter
 from plane.license.utils.instance_value import get_configuration_value
@@ -46,7 +49,15 @@ class GitHubOAuthProvider(OauthAdapter):
         client_id = GITHUB_CLIENT_ID
         client_secret = GITHUB_CLIENT_SECRET

-        redirect_uri = f"""{"https" if request.is_secure() else "http"}://{request.get_host()}/auth/github/callback/"""
+        scheme = (
+            "https"
+            if settings.IS_HEROKU
+            else "https" if request.is_secure() else "http"
+        )
+
+        redirect_uri = (
+            f"""{scheme}://{request.get_host()}/auth/github/callback/"""
+        )
         url_params = {
             "client_id": client_id,
             "redirect_uri": redirect_uri,
@@ -5,6 +5,9 @@ from urllib.parse import urlencode

 import pytz

+# Django imports
+from django.conf import settings
+
 # Module imports
 from plane.authentication.adapter.oauth import OauthAdapter
 from plane.license.utils.instance_value import get_configuration_value
@@ -43,7 +46,15 @@ class GoogleOAuthProvider(OauthAdapter):
         client_id = GOOGLE_CLIENT_ID
         client_secret = GOOGLE_CLIENT_SECRET

-        redirect_uri = f"""{"https" if request.is_secure() else "http"}://{request.get_host()}/auth/google/callback/"""
+        scheme = (
+            "https"
+            if settings.IS_HEROKU
+            else "https" if request.is_secure() else "http"
+        )
+
+        redirect_uri = (
+            f"""{scheme}://{request.get_host()}/auth/google/callback/"""
+        )
         url_params = {
             "client_id": client_id,
             "scope": self.scope,
158
apiserver/plane/authentication/provider/oauth/oidc.py
Normal file
@@ -0,0 +1,158 @@
# Python imports
import os
from datetime import datetime
from urllib.parse import urlencode
import pytz

# Module imports
from plane.authentication.adapter.oauth import OauthAdapter
from plane.license.utils.instance_value import get_configuration_value
from plane.authentication.adapter.error import (
    AuthenticationException,
    AUTHENTICATION_ERROR_CODES,
)
from plane.db.models import Account


class OIDCOAuthProvider(OauthAdapter):

    provider = "oidc"
    scope = "openid email profile"

    def __init__(self, request, code=None, state=None):
        (
            OIDC_CLIENT_ID,
            OIDC_CLIENT_SECRET,
            OIDC_TOKEN_URL,
            OIDC_USERINFO_URL,
            OIDC_AUTHORIZE_URL,
        ) = get_configuration_value(
            [
                {
                    "key": "OIDC_CLIENT_ID",
                    "default": os.environ.get("OIDC_CLIENT_ID"),
                },
                {
                    "key": "OIDC_CLIENT_SECRET",
                    "default": os.environ.get("OIDC_CLIENT_SECRET"),
                },
                {
                    "key": "OIDC_TOKEN_URL",
                    "default": os.environ.get("OIDC_TOKEN_URL"),
                },
                {
                    "key": "OIDC_USERINFO_URL",
                    "default": os.environ.get("OIDC_USERINFO_URL"),
                },
                {
                    "key": "OIDC_AUTHORIZE_URL",
                    "default": os.environ.get("OIDC_AUTHORIZE_URL"),
                },
            ]
        )

        if not (
            OIDC_CLIENT_ID
            and OIDC_CLIENT_SECRET
            and OIDC_TOKEN_URL
            and OIDC_USERINFO_URL
            and OIDC_AUTHORIZE_URL
        ):
            raise AuthenticationException(
                error_code=AUTHENTICATION_ERROR_CODES["OIDC_NOT_CONFIGURED"],
                error_message="OIDC_NOT_CONFIGURED",
            )

        redirect_uri = (
            f"{request.scheme}://{request.get_host()}/auth/oidc/callback/"
        )
        url_params = {
            "client_id": OIDC_CLIENT_ID,
            "response_type": "code",
            "redirect_uri": redirect_uri,
            "state": state,
            "scope": self.scope,
        }
        auth_url = f"{OIDC_AUTHORIZE_URL}?{urlencode(url_params)}"
        super().__init__(
            request,
            self.provider,
            OIDC_CLIENT_ID,
            self.scope,
            redirect_uri,
            auth_url,
            OIDC_TOKEN_URL,
            OIDC_USERINFO_URL,
            OIDC_CLIENT_SECRET,
            code,
        )

    def set_token_data(self):
        data = {
            "code": self.code,
            "client_id": self.client_id,
            "client_secret": self.client_secret,
            "redirect_uri": self.redirect_uri,
            "grant_type": "authorization_code",
        }
        token_response = self.get_user_token(
            data=data,
            headers={"Content-Type": "application/x-www-form-urlencoded"},
        )
        super().set_token_data(
            {
                "access_token": token_response.get("access_token"),
                "refresh_token": token_response.get("refresh_token", None),
                "access_token_expired_at": (
                    datetime.fromtimestamp(
                        token_response.get("expires_in"),
                        tz=pytz.utc,
                    )
                    if token_response.get("expires_in")
                    else None
                ),
                "refresh_token_expired_at": (
                    datetime.fromtimestamp(
                        token_response.get("refresh_token_expired_at"),
                        tz=pytz.utc,
                    )
                    if token_response.get("refresh_token_expired_at")
                    else None
                ),
                "id_token": token_response.get("id_token", ""),
            }
        )

    def set_user_data(self):
        user_info_response = self.get_user_response()
        user_data = {
            "email": user_info_response.get("email"),
            "user": {
                "avatar": user_info_response.get("picture"),
                "first_name": user_info_response.get("given_name"),
                "last_name": user_info_response.get("family_name"),
                "provider_id": user_info_response.get("sub"),
                "is_password_autoset": True,
            },
        }
        super().set_user_data(user_data)

    def logout(self, logout_url=None):
        (OIDC_LOGOUT_URL,) = get_configuration_value(
            [
                {
                    "key": "OIDC_LOGOUT_URL",
                    "default": os.environ.get("OIDC_LOGOUT_URL"),
                },
            ]
        )

        account = Account.objects.filter(
            user=self.request.user, provider=self.provider
        ).first()

        id_token = account.id_token if account and account.id_token else None
        if OIDC_LOGOUT_URL and id_token and logout_url:
            return f"{OIDC_LOGOUT_URL}?id_token_hint={id_token}&post_logout_redirect_uri={logout_url}"
        else:
            return False
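Reviewer note: the provider refuses to initiate a flow unless all five OIDC values are present; a sketch of the minimal environment, with placeholder values that would normally come from the IdP's discovery document:

# Hypothetical environment for OIDCOAuthProvider; every URL below is a
# placeholder, not a real identity provider.
import os

os.environ.setdefault("OIDC_CLIENT_ID", "plane-client")
os.environ.setdefault("OIDC_CLIENT_SECRET", "change-me")
os.environ.setdefault("OIDC_AUTHORIZE_URL", "https://idp.example.com/authorize")
os.environ.setdefault("OIDC_TOKEN_URL", "https://idp.example.com/oauth/token")
os.environ.setdefault("OIDC_USERINFO_URL", "https://idp.example.com/userinfo")
# OIDC_LOGOUT_URL is optional; logout() simply returns False without it.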
@@ -22,6 +22,15 @@ from .views import (
     SignUpAuthEndpoint,
     ForgotPasswordSpaceEndpoint,
     ResetPasswordSpaceEndpoint,
+    # OIDC
+    OIDCAuthInitiateEndpoint,
+    OIDCallbackEndpoint,
+    OIDCLogoutEndpoint,
+    # SAML
+    SAMLAuthInitiateEndpoint,
+    SAMLCallbackEndpoint,
+    SAMLMetadataEndpoint,
+    SAMLLogoutEndpoint,
     # Space
     EmailCheckSpaceEndpoint,
     GitLabCallbackSpaceEndpoint,
@@ -218,4 +227,41 @@ urlpatterns = [
         SetUserPasswordEndpoint.as_view(),
         name="set-password",
     ),
+    # OIDC
+    path(
+        "oidc/",
+        OIDCAuthInitiateEndpoint.as_view(),
+        name="oidc",
+    ),
+    path(
+        "oidc/callback/",
+        OIDCallbackEndpoint.as_view(),
+        name="oidc",
+    ),
+    path(
+        "oidc/logout/",
+        OIDCLogoutEndpoint.as_view(),
+        name="oidc",
+    ),
+    # SAML
+    path(
+        "saml/",
+        SAMLAuthInitiateEndpoint.as_view(),
+        name="saml",
+    ),
+    path(
+        "saml/callback/",
+        SAMLCallbackEndpoint.as_view(),
+        name="saml",
+    ),
+    path(
+        "saml/metadata/",
+        SAMLMetadataEndpoint.as_view(),
+        name="saml",
+    ),
+    path(
+        "saml/logout/",
+        SAMLLogoutEndpoint.as_view(),
+        name="saml",
+    ),
 ]
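Reviewer note: the three OIDC routes all share name="oidc" and the four SAML routes all share name="saml", so Django's reverse() can only ever resolve the last pattern registered under each name; a quick illustration, assuming this urlconf is active:

from django.urls import reverse

reverse("oidc")  # resolves to the ".../oidc/logout/" route, not ".../oidc/"
reverse("saml")  # resolves to the ".../saml/logout/" route, not ".../saml/"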
@@ -1,4 +1,9 @@
+# Python imports
+import uuid
+
 # Module imports
 from .workspace_project_join import process_workspace_project_invitations
+from plane.bgtasks.event_tracking_task import track_event


 def post_user_auth_workflow(
@@ -6,4 +11,28 @@ def post_user_auth_workflow(
     is_signup,
     request,
 ):
     # Process workspace project invitations
     process_workspace_project_invitations(user=user)
+    # track events
+    event_mapper = {
+        "email": "Email",
+        "google": "GOOGLE",
+        "magic-code": "Magic link",
+        "github": "GITHUB",
+    }
+
+    track_event.delay(
+        email=user.email,
+        event_name="Sign up" if is_signup else "Sign in",
+        properties={
+            "event_id": uuid.uuid4().hex,
+            "user": {"email": user.email, "id": str(user.id)},
+            "device_ctx": {
+                "ip": request.META.get("REMOTE_ADDR", None),
+                "user_agent": request.META.get("HTTP_USER_AGENT", None),
+            },
+            "medium": event_mapper.get(user.last_login_medium, "Email"),
+            "first_time": is_signup,
+        },
+    )
@@ -5,6 +5,7 @@ from plane.db.models import (
     WorkspaceMemberInvite,
 )
 from plane.utils.cache import invalidate_cache_directly
+from plane.payment.bgtasks.member_sync_task import member_sync_task


 def process_workspace_project_invitations(user):
@@ -37,6 +38,12 @@ def process_workspace_project_invitations(user):
         for workspace_member_invite in workspace_member_invites
     ]

+    # Sync workspace members
+    [
+        member_sync_task.delay(workspace_member_invite.workspace.slug)
+        for workspace_member_invite in workspace_member_invites
+    ]
+
     # Check if user has any project invites
     project_member_invites = ProjectMemberInvite.objects.filter(
         email=user.email, accepted=True
@@ -78,6 +85,12 @@ def process_workspace_project_invitations(user):
         ignore_conflicts=True,
     )

+    # Sync workspace members
+    [
+        member_sync_task.delay(project_member_invite.workspace.slug)
+        for project_member_invite in project_member_invites
+    ]
+
     # Delete all the invites
     workspace_member_invites.delete()
     project_member_invites.delete()
@@ -28,6 +28,19 @@ from .app.magic import (
     MagicSignUpEndpoint,
 )

+from .app.oidc import (
+    OIDCAuthInitiateEndpoint,
+    OIDCallbackEndpoint,
+    OIDCLogoutEndpoint,
+)
+
+from .app.saml import (
+    SAMLAuthInitiateEndpoint,
+    SAMLCallbackEndpoint,
+    SAMLMetadataEndpoint,
+    SAMLLogoutEndpoint,
+)
+
 from .app.signout import SignOutAuthEndpoint

104
apiserver/plane/authentication/views/app/oidc.py
Normal file
@@ -0,0 +1,104 @@
# Python imports
import uuid
from urllib.parse import urlencode, urljoin

# Django imports
from django.http import HttpResponseRedirect
from django.views import View
from django.contrib.auth import logout

# Module imports
from plane.authentication.provider.oauth.oidc import OIDCOAuthProvider
from plane.authentication.utils.workspace_project_join import (
    process_workspace_project_invitations,
)
from plane.authentication.utils.redirection_path import get_redirection_path
from plane.authentication.utils.login import user_login
from plane.license.models import Instance
from plane.authentication.adapter.error import (
    AuthenticationException,
    AUTHENTICATION_ERROR_CODES,
)
from plane.authentication.utils.host import base_host


class OIDCAuthInitiateEndpoint(View):

    def get(self, request):
        request.session["host"] = base_host(request=request, is_app=True)
        next_path = request.GET.get("next_path")
        if next_path:
            request.session["next_path"] = str(next_path)
        try:
            # Check instance configuration
            instance = Instance.objects.first()
            if instance is None or not instance.is_setup_done:
                raise AuthenticationException(
                    error_code=AUTHENTICATION_ERROR_CODES[
                        "INSTANCE_NOT_CONFIGURED"
                    ],
                    error_message="INSTANCE_NOT_CONFIGURED",
                )

            state = uuid.uuid4().hex
            provider = OIDCOAuthProvider(request=request, state=state)
            request.session["state"] = state
            auth_url = provider.get_auth_url()
            return HttpResponseRedirect(auth_url)
        except AuthenticationException as e:
            params = e.get_error_dict()
            if next_path:
                params["next_path"] = str(next_path)
            url = urljoin(
                base_host(request=request, is_app=True),
                "?" + urlencode(params),
            )
            return HttpResponseRedirect(url)


class OIDCallbackEndpoint(View):

    def get(self, request):
        code = request.GET.get("code")
        state = request.GET.get("state")
        host = request.session.get("host")
        try:
            if state != request.session.get("state", ""):
                raise AuthenticationException(
                    error_code=AUTHENTICATION_ERROR_CODES[
                        "OIDC_PROVIDER_ERROR"
                    ],
                    error_message="OIDC_PROVIDER_ERROR",
                )

            if not code:
                raise AuthenticationException(
                    error_code=AUTHENTICATION_ERROR_CODES[
                        "OIDC_PROVIDER_ERROR"
                    ],
                    error_message="OIDC_PROVIDER_ERROR",
                )

            provider = OIDCOAuthProvider(
                request=request,
                code=code,
            )
            user = provider.authenticate()
            # Log the user in and record the device info
            user_login(request=request, user=user)
            # Process workspace and project invitations
            process_workspace_project_invitations(user=user)
            # Get the redirection path
            path = get_redirection_path(user=user)
            # redirect to referer path
            url = urljoin(host, path)
            return HttpResponseRedirect(url)
        except AuthenticationException as e:
            url = urljoin(
                host,
                "?" + urlencode(e.get_error_dict()),
            )
            return HttpResponseRedirect(url)


class OIDCLogoutEndpoint(View):

    def get(self, request):
        logout(request=request)
        return HttpResponseRedirect(base_host(request=request, is_app=True))
122
apiserver/plane/authentication/views/app/saml.py
Normal file
@@ -0,0 +1,122 @@
# Python imports
from urllib.parse import urlencode, urljoin

# Django imports
from django.http import HttpResponseRedirect, HttpResponse
from django.views import View
from django.views.decorators.csrf import csrf_exempt
from django.utils.decorators import method_decorator

from django.contrib.auth import logout

# Module imports
from plane.authentication.adapter.saml import SAMLAdapter
from plane.authentication.utils.login import user_login
from plane.authentication.utils.workspace_project_join import (
    process_workspace_project_invitations,
)
from plane.authentication.utils.redirection_path import get_redirection_path
from plane.license.models import Instance
from plane.authentication.adapter.error import (
    AuthenticationException,
    AUTHENTICATION_ERROR_CODES,
)
from plane.authentication.utils.host import base_host


class SAMLAuthInitiateEndpoint(View):
    def get(self, request):
        request.session["host"] = base_host(request=request, is_app=True)
        next_path = request.GET.get("next_path")
        if next_path:
            request.session["next_path"] = str(next_path)

        try:
            # Check instance configuration
            instance = Instance.objects.first()
            if instance is None or not instance.is_setup_done:
                raise AuthenticationException(
                    error_code=AUTHENTICATION_ERROR_CODES[
                        "INSTANCE_NOT_CONFIGURED"
                    ],
                    error_message="INSTANCE_NOT_CONFIGURED",
                )
            # Provider
            provider = SAMLAdapter(
                request=request,
            )
            # Get the auth url
            return_url = provider.get_auth_url()
            return HttpResponseRedirect(return_url)
        except AuthenticationException as e:
            params = e.get_error_dict()
            if next_path:
                params["next_path"] = str(next_path)
            url = urljoin(
                base_host(request=request, is_app=True),
                "?" + urlencode(params),
            )
            return HttpResponseRedirect(url)


@method_decorator(csrf_exempt, name="dispatch")
class SAMLCallbackEndpoint(View):
    def post(self, request):
        host = request.session.get("host", "/")
        try:
            provider = SAMLAdapter(request=request)
            user = provider.authenticate()
            # Log the user in and record the device info
            user_login(request=request, user=user)
            # Process workspace and project invitations
            process_workspace_project_invitations(user=user)
            # Get the redirection path
            path = get_redirection_path(user=user)
            # redirect to referer path
            url = urljoin(host, path)
            return HttpResponseRedirect(url)
        except AuthenticationException as e:
            url = urljoin(host, "?" + urlencode(e.get_error_dict()))
            return HttpResponseRedirect(url)


@method_decorator(csrf_exempt, name="dispatch")
class SAMLLogoutEndpoint(View):

    def get(self, request, *args, **kwargs):
        logout(request=request)
        return HttpResponseRedirect(base_host(request=request, is_app=True))


@method_decorator(csrf_exempt, name="dispatch")
class SAMLMetadataEndpoint(View):

    def get(self, request):
        xml_template = f"""<EntityDescriptor xmlns="urn:oasis:names:tc:SAML:2.0:metadata"
    entityID="{request.scheme}://{request.get_host()}/auth/saml/metadata/">
    <SPSSODescriptor protocolSupportEnumeration="urn:oasis:names:tc:SAML:2.0:protocol">
        <AssertionConsumerService Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
            Location="{request.scheme}://{request.get_host()}/auth/saml/callback/"
            index="1"/>
        <SingleLogoutService Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
            Location="{request.scheme}://{request.get_host()}/auth/saml/logout/"/>
        <NameIDFormat>urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress</NameIDFormat>
        <AttributeConsumingService index="1">
            <ServiceName xml:lang="en">Plane</ServiceName>
            <RequestedAttribute Name="user.firstName"
                FriendlyName="first_name"
                NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic"
                isRequired="false"/>
            <RequestedAttribute Name="user.lastName"
                FriendlyName="last_name"
                NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic"
                isRequired="false"/>
            <RequestedAttribute Name="user.email"
                FriendlyName="email"
                NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic"
                isRequired="true"/>
        </AttributeConsumingService>
    </SPSSODescriptor>
</EntityDescriptor>
"""
        return HttpResponse(xml_template, content_type="application/xml")
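Reviewer note: the metadata endpoint is what an IdP administrator points at when registering Plane as a service provider; a quick check of the served document (the host is a placeholder):

import requests

resp = requests.get("https://plane.example.com/auth/saml/metadata/")
assert resp.status_code == 200
assert resp.headers["Content-Type"].startswith("application/xml")
assert "<EntityDescriptor" in resp.text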
@@ -7,6 +7,8 @@ from django.utils import timezone
 # Module imports
 from plane.authentication.utils.host import user_ip, base_host
 from plane.db.models import User
+from plane.authentication.provider.oauth.oidc import OIDCOAuthProvider
+from plane.authentication.adapter.saml import SAMLAdapter


 class SignOutAuthEndpoint(View):
@@ -18,7 +20,28 @@ class SignOutAuthEndpoint(View):
         user.last_logout_ip = user_ip(request=request)
         user.last_logout_time = timezone.now()
         user.save()
-        # Log the user out
+
+        # Check if the last medium of user is oidc
+        if request.user.last_login_medium == "oidc":
+            provider = OIDCOAuthProvider(
+                request=request,
+            )
+            logout_url = provider.logout(
+                logout_url=f"{base_host(request=request, is_app=True)}/auth/oidc/logout/"
+            )
+            if logout_url:
+                return HttpResponseRedirect(logout_url)
+
+        # Check if the last medium of user is saml
+        if request.user.last_login_medium == "saml":
+            provider = SAMLAdapter(
+                request=request,
+            )
+            logout_url = provider.logout()
+            if logout_url:
+                return HttpResponseRedirect(logout_url)
+
+        # Logout user
         logout(request)
         return HttpResponseRedirect(
             base_host(request=request, is_app=True)
@@ -2,14 +2,69 @@ from django.utils import timezone
 from datetime import timedelta
 from plane.db.models import APIActivityLog
 from celery import shared_task
+from django.conf import settings
+from pymongo import MongoClient
+from pymongo.errors import BulkWriteError
 from plane.utils.exception_logger import log_exception
+
+BATCH_SIZE = 3000


 @shared_task
 def delete_api_logs():
-    # Get the logs older than 30 days to delete
-    logs_to_delete = APIActivityLog.objects.filter(
-        created_at__lte=timezone.now() - timedelta(days=30)
-    )
-
-    # Delete the logs
-    logs_to_delete._raw_delete(logs_to_delete.db)
+    if settings.MONGO_DB_URL:
+        # Get the logs older than 30 days to delete
+        logs_to_delete = APIActivityLog.objects.filter(
+            created_at__lte=timezone.now() - timedelta(days=30)
+        )
+
+        # Create a MongoDB client
+        client = MongoClient(settings.MONGO_DB_URL)
+        db = client["plane"]
+        collection = db["api_activity_logs"]
+
+        # Function to insert documents in batches
+        def bulk_insert(docs):
+            try:
+                collection.insert_many(docs)
+            except BulkWriteError as bwe:
+                log_exception(bwe)
+
+        # Prepare the logs for bulk insert
+        def log_generator():
+            batch = []
+            for log in logs_to_delete.iterator():
+                batch.append(
+                    {
+                        "token_identifier": log.token_identifier,
+                        "path": log.path,
+                        "method": log.method,
+                        "query_params": log.query_params,
+                        "headers": log.headers,
+                        "body": log.body,
+                        "response_body": log.response_body,
+                        "response_code": log.response_code,
+                        "ip_address": log.ip_address,
+                        "user_agent": log.user_agent,
+                        "created_at": log.created_at,
+                        "updated_at": log.updated_at,
+                        "created_by": str(log.created_by_id) if log.created_by_id else None,
+                        "updated_by": str(log.updated_by_id) if log.updated_by_id else None,
+                    }
+                )
+                # If batch size is reached, yield the batch
+                if len(batch) == BATCH_SIZE:
+                    yield batch
+                    batch = []
+
+            # Yield the remaining logs
+            if batch:
+                yield batch
+
+        # Upload the logs to MongoDB in batches
+        for batch in log_generator():
+            bulk_insert(batch)
+
+        # Delete the logs
+        logs_to_delete._raw_delete(logs_to_delete.db)
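Reviewer note: with this change the 30-day purge only runs when MONGO_DB_URL is set; when it is unset, expired APIActivityLog rows are no longer deleted at all, which may or may not be intended. The batching inside log_generator(), reduced to a standalone sketch:

# Standalone version of the generator's batching pattern; the default here
# stands in for the module-level BATCH_SIZE constant.
def batched(items, batch_size=3000):
    batch = []
    for item in items:
        batch.append(item)
        if len(batch) == batch_size:  # full batch: hand it off and start over
            yield batch
            batch = []
    if batch:  # flush the final partial batch
        yield batch

assert [len(b) for b in batched(range(7), 3)] == [3, 3, 1]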
598
apiserver/plane/bgtasks/create_faker.py
Normal file
@@ -0,0 +1,598 @@
# Python imports
import random
from datetime import datetime

# Django imports
from django.db.models import Max

# Third party imports
from celery import shared_task
from faker import Faker

# Module imports
from plane.db.models import (
    Workspace,
    WorkspaceMember,
    User,
    Project,
    ProjectMember,
    State,
    Label,
    Cycle,
    Module,
    Issue,
    IssueSequence,
    IssueAssignee,
    IssueLabel,
    IssueActivity,
    CycleIssue,
    ModuleIssue,
)


def create_workspace_members(workspace, members):
    members = User.objects.filter(email__in=members)

    _ = WorkspaceMember.objects.bulk_create(
        [
            WorkspaceMember(
                workspace=workspace,
                member=member,
                role=20,
            )
            for member in members
        ],
        ignore_conflicts=True,
    )
    return


def create_project(workspace, user_id):
    fake = Faker()
    name = fake.name()
    project = Project.objects.create(
        workspace=workspace,
        name=name,
        identifier=name[
            : random.randint(2, 12 if len(name) - 1 >= 12 else len(name) - 1)
        ].upper(),
        created_by_id=user_id,
    )

    # Add current member as project member
    _ = ProjectMember.objects.create(
        project=project,
        member_id=user_id,
        role=20,
    )

    return project


def create_project_members(workspace, project, members):
    members = User.objects.filter(email__in=members)

    _ = ProjectMember.objects.bulk_create(
        [
            ProjectMember(
                project=project,
                workspace=workspace,
                member=member,
                role=20,
                sort_order=random.randint(0, 65535),
            )
            for member in members
        ],
        ignore_conflicts=True,
    )
    return


def create_states(workspace, project, user_id):
    states = [
        {
            "name": "Backlog",
            "color": "#A3A3A3",
            "sequence": 15000,
            "group": "backlog",
            "default": True,
        },
        {
            "name": "Todo",
            "color": "#3A3A3A",
            "sequence": 25000,
            "group": "unstarted",
        },
        {
            "name": "In Progress",
            "color": "#F59E0B",
            "sequence": 35000,
            "group": "started",
        },
        {
            "name": "Done",
            "color": "#16A34A",
            "sequence": 45000,
            "group": "completed",
        },
        {
            "name": "Cancelled",
            "color": "#EF4444",
            "sequence": 55000,
            "group": "cancelled",
        },
    ]

    states = State.objects.bulk_create(
        [
            State(
                name=state["name"],
                color=state["color"],
                project=project,
                sequence=state["sequence"],
                workspace=workspace,
                group=state["group"],
                default=state.get("default", False),
                created_by_id=user_id,
            )
            for state in states
        ]
    )

    return states


def create_labels(workspace, project, user_id):
    fake = Faker()
    Faker.seed(0)

    return Label.objects.bulk_create(
        [
            Label(
                name=fake.color_name(),
                color=fake.hex_color(),
                project=project,
                workspace=workspace,
                created_by_id=user_id,
                sort_order=random.randint(0, 65535),
            )
            for _ in range(0, 50)
        ],
        ignore_conflicts=True,
    )


def create_cycles(workspace, project, user_id, cycle_count):
    fake = Faker()
    Faker.seed(0)

    cycles = []
    used_date_ranges = set()  # Track used date ranges

    while len(cycles) <= cycle_count:
        # Generate a start date, allowing for None
        start_date_option = [None, fake.date_this_year()]
        start_date = start_date_option[random.randint(0, 1)]

        # Initialize end_date based on start_date
        end_date = (
            None
            if start_date is None
            else fake.date_between_dates(
                date_start=start_date,
                date_end=datetime.now().date().replace(month=12, day=31),
            )
        )

        # Ensure end_date is strictly after start_date if start_date is not None
        while start_date is not None and (
            end_date <= start_date
            or (start_date, end_date) in used_date_ranges
        ):
            end_date = fake.date_this_year()

        # Add the unique date range to the set
        (
            used_date_ranges.add((start_date, end_date))
            if (end_date is not None and start_date is not None)
            else None
        )

        # Append the cycle with unique date range
        cycles.append(
            Cycle(
                name=fake.name(),
                owned_by_id=user_id,
                sort_order=random.randint(0, 65535),
                start_date=start_date,
                end_date=end_date,
                project=project,
                workspace=workspace,
            )
        )

    return Cycle.objects.bulk_create(cycles, ignore_conflicts=True)


def create_modules(workspace, project, user_id, module_count):
    fake = Faker()
    Faker.seed(0)

    modules = []
    for _ in range(0, module_count):
        start_date = [None, fake.date_this_year()][random.randint(0, 1)]
        end_date = (
            None
            if start_date is None
            else fake.date_between_dates(
                date_start=start_date,
                date_end=datetime.now().date().replace(month=12, day=31),
            )
        )

        modules.append(
            Module(
                name=fake.name(),
                sort_order=random.randint(0, 65535),
                start_date=start_date,
                target_date=end_date,
                project=project,
                workspace=workspace,
            )
        )

    return Module.objects.bulk_create(modules, ignore_conflicts=True)


def create_issues(workspace, project, user_id, issue_count):
    fake = Faker()
    Faker.seed(0)

    states = State.objects.values_list("id", flat=True)
    creators = ProjectMember.objects.values_list("member_id", flat=True)

    issues = []

    # Get the maximum sequence_id
    last_id = IssueSequence.objects.filter(
        project=project,
    ).aggregate(
        largest=Max("sequence")
    )["largest"]

    last_id = 1 if last_id is None else last_id + 1

    # Get the maximum sort order
    largest_sort_order = Issue.objects.filter(
        project=project,
        state_id=states[random.randint(0, len(states) - 1)],
    ).aggregate(largest=Max("sort_order"))["largest"]

    largest_sort_order = (
        65535 if largest_sort_order is None else largest_sort_order + 10000
    )

    for _ in range(0, issue_count):
        start_date = [None, fake.date_this_year()][random.randint(0, 1)]
        end_date = (
            None
            if start_date is None
            else fake.date_between_dates(
                date_start=start_date,
                date_end=datetime.now().date().replace(month=12, day=31),
            )
        )

        sentence = fake.sentence()
        issues.append(
            Issue(
                state_id=states[random.randint(0, len(states) - 1)],
                project=project,
                workspace=workspace,
                name=sentence[:254],
                description_html=f"<p>{sentence}</p>",
                description_stripped=sentence,
                sequence_id=last_id,
                sort_order=largest_sort_order,
                start_date=start_date,
                target_date=end_date,
                priority=["urgent", "high", "medium", "low", "none"][
                    random.randint(0, 4)
                ],
                created_by_id=creators[random.randint(0, len(creators) - 1)],
            )
        )

        largest_sort_order = largest_sort_order + random.randint(0, 1000)
        last_id = last_id + 1

    issues = Issue.objects.bulk_create(
        issues, ignore_conflicts=True, batch_size=1000
    )
    # Sequences
    _ = IssueSequence.objects.bulk_create(
        [
            IssueSequence(
                issue=issue,
                sequence=issue.sequence_id,
                project=project,
                workspace=workspace,
            )
            for issue in issues
        ],
        batch_size=100,
    )

    # Track the issue activities
    IssueActivity.objects.bulk_create(
        [
            IssueActivity(
                issue=issue,
                actor_id=user_id,
                project=project,
                workspace=workspace,
                comment="created the issue",
                verb="created",
                created_by_id=user_id,
            )
            for issue in issues
        ],
        batch_size=100,
    )
    return
def create_issue_parent(workspace, project, user_id, issue_count):

    parent_count = issue_count / 4

    parent_issues = Issue.objects.filter(project=project).values_list(
        "id", flat=True
    )[: int(parent_count)]
    sub_issues = Issue.objects.filter(project=project).exclude(
        pk__in=parent_issues
    )[: int(issue_count / 2)]

    bulk_sub_issues = []
    for sub_issue in sub_issues:
        sub_issue.parent_id = parent_issues[
            random.randint(0, int(parent_count - 1))
        ]
        # Collect the mutated issue so bulk_update below actually persists it
        bulk_sub_issues.append(sub_issue)

    Issue.objects.bulk_update(bulk_sub_issues, ["parent"], batch_size=1000)
def create_issue_assignees(workspace, project, user_id, issue_count):
    # assignees
    assignees = ProjectMember.objects.filter(project=project).values_list(
        "member_id", flat=True
    )
    issues = random.sample(
        list(
            Issue.objects.filter(project=project).values_list("id", flat=True)
        ),
        int(issue_count / 2),
    )

    # Bulk issue
    bulk_issue_assignees = []
    for issue in issues:
        for assignee in random.sample(
            list(assignees), random.randint(0, len(assignees) - 1)
        ):
            bulk_issue_assignees.append(
                IssueAssignee(
                    issue_id=issue,
                    assignee_id=assignee,
                    project=project,
                    workspace=workspace,
                )
            )

    # Issue assignees
    IssueAssignee.objects.bulk_create(
        bulk_issue_assignees, batch_size=1000, ignore_conflicts=True
    )


def create_issue_labels(workspace, project, user_id, issue_count):
    # labels
    labels = Label.objects.filter(project=project).values_list("id", flat=True)
    issues = random.sample(
        list(
            Issue.objects.filter(project=project).values_list("id", flat=True)
        ),
        int(issue_count / 2),
    )

    # Bulk issue
    bulk_issue_labels = []
    for issue in issues:
        for label in random.sample(
            list(labels), random.randint(0, len(labels) - 1)
        ):
            bulk_issue_labels.append(
                IssueLabel(
                    issue_id=issue,
                    label_id=label,
                    project=project,
                    workspace=workspace,
                )
            )

    # Issue labels
    IssueLabel.objects.bulk_create(
        bulk_issue_labels, batch_size=1000, ignore_conflicts=True
    )


def create_cycle_issues(workspace, project, user_id, issue_count):
    # cycles
    cycles = Cycle.objects.filter(project=project).values_list("id", flat=True)
    issues = random.sample(
        list(
            Issue.objects.filter(project=project).values_list("id", flat=True)
        ),
        int(issue_count / 2),
    )

    # Bulk issue
    bulk_cycle_issues = []
    for issue in issues:
        cycle = cycles[random.randint(0, len(cycles) - 1)]
        bulk_cycle_issues.append(
            CycleIssue(
                cycle_id=cycle,
                issue_id=issue,
                project=project,
                workspace=workspace,
            )
        )

    # Cycle issues
    CycleIssue.objects.bulk_create(
        bulk_cycle_issues, batch_size=1000, ignore_conflicts=True
    )


def create_module_issues(workspace, project, user_id, issue_count):
    # modules
    modules = Module.objects.filter(project=project).values_list(
        "id", flat=True
    )
    issues = random.sample(
        list(
            Issue.objects.filter(project=project).values_list("id", flat=True)
        ),
        int(issue_count / 2),
    )

    # Bulk issue
    bulk_module_issues = []
    for issue in issues:
        module = modules[random.randint(0, len(modules) - 1)]
        bulk_module_issues.append(
            ModuleIssue(
                module_id=module,
                issue_id=issue,
                project=project,
                workspace=workspace,
            )
        )
    # Module issues
    ModuleIssue.objects.bulk_create(
        bulk_module_issues, batch_size=1000, ignore_conflicts=True
    )


@shared_task
def create_fake_data(
    slug, email, members, issue_count, cycle_count, module_count
):
    workspace = Workspace.objects.get(slug=slug)

    user = User.objects.get(email=email)
    user_id = user.id

    # create workspace members
    print("creating workspace members")
    create_workspace_members(workspace=workspace, members=members)
    print("Done creating workspace members")

    # Create a project
    print("Creating project")
    project = create_project(workspace=workspace, user_id=user_id)
    print("Done creating projects")

    # create project members
    print("Creating project members")
    create_project_members(
        workspace=workspace, project=project, members=members
    )
    print("Done creating project members")

    # Create states
    print("Creating states")
    _ = create_states(workspace=workspace, project=project, user_id=user_id)
    print("Done creating states")

    # Create labels
    print("Creating labels")
    _ = create_labels(workspace=workspace, project=project, user_id=user_id)
    print("Done creating labels")

    # create cycles
    print("Creating cycles")
    _ = create_cycles(
        workspace=workspace,
        project=project,
        user_id=user_id,
        cycle_count=cycle_count,
    )
    print("Done creating cycles")

    # create modules
    print("Creating modules")
    _ = create_modules(
        workspace=workspace,
        project=project,
        user_id=user_id,
        module_count=module_count,
    )
    print("Done creating modules")

    print("Creating issues")
    create_issues(
        workspace=workspace,
        project=project,
        user_id=user_id,
        issue_count=issue_count,
    )
    print("Done creating issues")

    print("Creating parent and sub issues")
    create_issue_parent(
        workspace=workspace,
        project=project,
        user_id=user_id,
        issue_count=issue_count,
    )
    print("Done creating parent and sub issues")

    print("Creating issue assignees")
    create_issue_assignees(
        workspace=workspace,
        project=project,
        user_id=user_id,
        issue_count=issue_count,
    )
    print("Done creating issue assignees")

    print("Creating issue labels")
    create_issue_labels(
        workspace=workspace,
        project=project,
        user_id=user_id,
        issue_count=issue_count,
    )
    print("Done creating issue labels")

    print("Creating cycle issues")
    create_cycle_issues(
        workspace=workspace,
        project=project,
        user_id=user_id,
        issue_count=issue_count,
    )
    print("Done creating cycle issues")

    print("Creating module issues")
    create_module_issues(
        workspace=workspace,
        project=project,
        user_id=user_id,
        issue_count=issue_count,
    )
    print("Done creating module issues")

    return
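Reviewer note: how the new task is meant to be queued (the faker management command later in this commit does exactly this; the argument values here are illustrative):

from plane.bgtasks.create_faker import create_fake_data

create_fake_data.delay(
    slug="acme",                  # existing workspace slug
    email="owner@example.com",    # existing user who will own the data
    members=["dev@example.com"],  # extra member emails to add
    issue_count=200,
    cycle_count=10,
    module_count=10,
)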
@@ -37,7 +37,7 @@ from plane.db.models import (
 def create_project(workspace, user_id):
     fake = Faker()
     name = fake.name()
-    unique_id = str(uuid.uuid4())[:5]
+    unique_id = str(uuid.uuid4())[:5]

     project = Project.objects.create(
         workspace=workspace,
@@ -244,7 +244,6 @@ def create_pages(workspace, project, user_id, pages_count):
     pages.append(
         Page(
             name=fake.name(),
-            project=project,
             workspace=workspace,
             owned_by_id=user_id,
             access=random.randint(0, 1),
@@ -292,8 +291,14 @@ def create_issues(workspace, project, user_id, issue_count):
     fake = Faker()
     Faker.seed(0)

-    states = State.objects.filter(workspace=workspace, project=project).exclude(group="Triage").values_list("id", flat=True)
-    creators = ProjectMember.objects.filter(workspace=workspace, project=project).values_list("member_id", flat=True)
+    states = (
+        State.objects.filter(workspace=workspace, project=project)
+        .exclude(group="Triage")
+        .values_list("id", flat=True)
+    )
+    creators = ProjectMember.objects.filter(
+        workspace=workspace, project=project
+    ).values_list("member_id", flat=True)

     issues = []
@@ -30,7 +30,7 @@ def posthogConfiguration():


 @shared_task
-def auth_events(user, email, user_agent, ip, event_name, medium, first_time):
+def track_event(email, event_name, properties):
     try:
         POSTHOG_API_KEY, POSTHOG_HOST = posthogConfiguration()

@@ -39,43 +39,7 @@ def auth_events(user, email, user_agent, ip, event_name, medium, first_time):
         posthog.capture(
             email,
             event=event_name,
-            properties={
-                "event_id": uuid.uuid4().hex,
-                "user": {"email": email, "id": str(user)},
-                "device_ctx": {
-                    "ip": ip,
-                    "user_agent": user_agent,
-                },
-                "medium": medium,
-                "first_time": first_time,
-            },
-        )
-    except Exception as e:
-        log_exception(e)
-        return
-
-
-@shared_task
-def workspace_invite_event(
-    user, email, user_agent, ip, event_name, accepted_from
-):
-    try:
-        POSTHOG_API_KEY, POSTHOG_HOST = posthogConfiguration()
-
-        if POSTHOG_API_KEY and POSTHOG_HOST:
-            posthog = Posthog(POSTHOG_API_KEY, host=POSTHOG_HOST)
-            posthog.capture(
-                email,
-                event=event_name,
-                properties={
-                    "event_id": uuid.uuid4().hex,
-                    "user": {"email": email, "id": str(user)},
-                    "device_ctx": {
-                        "ip": ip,
-                        "user_agent": user_agent,
-                    },
-                    "accepted_from": accepted_from,
-                },
+            properties=properties,
         )
     except Exception as e:
         log_exception(e)
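Reviewer note: with auth_events and workspace_invite_event collapsed into the generic track_event, call sites now assemble the properties dict themselves; the call shape after the refactor (mirroring post_user_auth_workflow earlier in this commit, values illustrative):

track_event.delay(
    email="user@example.com",
    event_name="Sign in",
    properties={
        "medium": "Email",
        "first_time": False,
    },
)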
216
apiserver/plane/bgtasks/importer_task.py
Normal file
@@ -0,0 +1,216 @@
# Python imports
import json
import requests
import uuid

# Django imports
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
from django.contrib.auth.hashers import make_password

# Third Party imports
from celery import shared_task
from sentry_sdk import capture_exception

# Module imports
from plane.app.serializers import ImporterSerializer
from plane.db.models import (
    Importer,
    WorkspaceMember,
    GithubRepositorySync,
    GithubRepository,
    ProjectMember,
    WorkspaceIntegration,
    Label,
    User,
    IssueProperty,
    UserNotificationPreference,
)

from plane.bgtasks.user_welcome_task import send_welcome_slack
from plane.payment.bgtasks.member_sync_task import member_sync_task


@shared_task
def service_importer(service, importer_id):
    try:
        importer = Importer.objects.get(pk=importer_id)
        importer.status = "processing"
        importer.save()

        users = importer.data.get("users", [])

        # Check if we need to import users as well
        if len(users):
            # For all invited users create the users
            new_users = User.objects.bulk_create(
                [
                    User(
                        email=user.get("email").strip().lower(),
                        username=uuid.uuid4().hex,
                        password=make_password(uuid.uuid4().hex),
                        is_password_autoset=True,
                    )
                    for user in users
                    if user.get("import", False) == "invite"
                ],
                batch_size=100,
                ignore_conflicts=True,
            )

            _ = UserNotificationPreference.objects.bulk_create(
                [UserNotificationPreference(user=user) for user in new_users],
                batch_size=100,
            )

            _ = [
                send_welcome_slack.delay(
                    str(user.id),
                    True,
                    f"{user.email} was imported to Plane from {service}",
                )
                for user in new_users
            ]

            workspace_users = User.objects.filter(
                email__in=[
                    user.get("email").strip().lower()
                    for user in users
                    if user.get("import", False) == "invite"
                    or user.get("import", False) == "map"
                ]
            )

            # Check if any of the users are already member of workspace
            _ = WorkspaceMember.objects.filter(
                member__in=[user for user in workspace_users],
                workspace_id=importer.workspace_id,
            ).update(is_active=True)

            # Add new users to Workspace and project automatically
            WorkspaceMember.objects.bulk_create(
                [
                    WorkspaceMember(
                        member=user,
                        workspace_id=importer.workspace_id,
                        created_by=importer.created_by,
                    )
                    for user in workspace_users
                ],
                batch_size=100,
                ignore_conflicts=True,
            )

            # Sync workspace members
            member_sync_task(importer.workspace.slug)

            ProjectMember.objects.bulk_create(
                [
                    ProjectMember(
                        project_id=importer.project_id,
                        workspace_id=importer.workspace_id,
                        member=user,
                        created_by=importer.created_by,
                    )
                    for user in workspace_users
                ],
                batch_size=100,
                ignore_conflicts=True,
            )

            IssueProperty.objects.bulk_create(
                [
                    IssueProperty(
                        project_id=importer.project_id,
                        workspace_id=importer.workspace_id,
                        user=user,
                        created_by=importer.created_by,
                    )
                    for user in workspace_users
                ],
                batch_size=100,
                ignore_conflicts=True,
            )

        # Check if sync config is on for github importers
        if service == "github" and importer.config.get("sync", False):
            name = importer.metadata.get("name", False)
            url = importer.metadata.get("url", False)
            config = importer.metadata.get("config", {})
            owner = importer.metadata.get("owner", False)
            repository_id = importer.metadata.get("repository_id", False)

            workspace_integration = WorkspaceIntegration.objects.get(
                workspace_id=importer.workspace_id,
                integration__provider="github",
            )

            # Delete the old repository object
            GithubRepositorySync.objects.filter(
                project_id=importer.project_id
            ).delete()
            GithubRepository.objects.filter(
                project_id=importer.project_id
            ).delete()

            # Create a Label for github
            label = Label.objects.filter(
                name="GitHub", project_id=importer.project_id
            ).first()

            if label is None:
                label = Label.objects.create(
                    name="GitHub",
                    project_id=importer.project_id,
                    description="Label to sync Plane issues with GitHub issues",
                    color="#003773",
                )
            # Create repository
            repo = GithubRepository.objects.create(
                name=name,
                url=url,
                config=config,
                repository_id=repository_id,
                owner=owner,
                project_id=importer.project_id,
            )

            # Create repo sync
            _ = GithubRepositorySync.objects.create(
                repository=repo,
                workspace_integration=workspace_integration,
                actor=workspace_integration.actor,
                credentials=importer.data.get("credentials", {}),
                project_id=importer.project_id,
                label=label,
            )

            # Add bot as a member in the project
            _ = ProjectMember.objects.get_or_create(
                member=workspace_integration.actor,
                role=20,
                project_id=importer.project_id,
            )

        if settings.PROXY_BASE_URL:
            headers = {"Content-Type": "application/json"}
            import_data_json = json.dumps(
                ImporterSerializer(importer).data,
                cls=DjangoJSONEncoder,
            )
            _ = requests.post(
                f"{settings.PROXY_BASE_URL}/hooks/workspaces/{str(importer.workspace_id)}/projects/{str(importer.project_id)}/importers/{str(service)}/",
                json=import_data_json,
                headers=headers,
            )

        return
    except Exception as e:
        importer = Importer.objects.get(pk=importer_id)
        importer.status = "failed"
        importer.save()
        # Print logs if in DEBUG mode
        if settings.DEBUG:
            print(e)
        capture_exception(e)
        return
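Reviewer note: unlike the viewset and invitation paths above, service_importer calls member_sync_task(importer.workspace.slug) synchronously rather than enqueueing it; if that is unintentional, the enqueued form used elsewhere in this commit would be:

member_sync_task.delay(importer.workspace.slug)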
@@ -10,7 +10,7 @@ from django.db.models import Q
 from django.utils import timezone

 # Module imports
-from plane.bgtasks.issue_activites_task import issue_activity
+from plane.bgtasks.issue_activities_task import issue_activity
 from plane.db.models import Issue, Project, State
 from plane.utils.exception_logger import log_exception

@@ -42,8 +42,9 @@ def page_transaction(new_value, old_value, page_id):
     new_transactions = []
     deleted_transaction_ids = set()

-    # TODO - Add "issue-embed-component", "img", "todo" components
-    components = ["mention-component"]
+    # TODO - Add "img", "todo" components
+    components = ["mention-component", "issue-embed-component"]

     for component in components:
         old_mentions = extract_components(old_value, component)
         new_mentions = extract_components(new_value, component)
@@ -57,7 +58,11 @@ def page_transaction(new_value, old_value, page_id):
             transaction=mention["id"],
             page_id=page_id,
             entity_identifier=mention["entity_identifier"],
-            entity_name=mention["entity_name"],
+            entity_name=(
+                mention["entity_name"]
+                if mention["entity_name"]
+                else "issue"
+            ),
             workspace_id=page.workspace_id,
             created_at=timezone.now(),
             updated_at=timezone.now(),
36
apiserver/plane/bgtasks/user_welcome_task.py
Normal file
@@ -0,0 +1,36 @@
# Django imports
from django.conf import settings

# Third party imports
from celery import shared_task
from sentry_sdk import capture_exception
from slack_sdk import WebClient
from slack_sdk.errors import SlackApiError

# Module imports
from plane.db.models import User


@shared_task
def send_welcome_slack(user_id, created, message):
    try:
        instance = User.objects.get(pk=user_id)

        if created and not instance.is_bot:
            # Send message on slack as well
            if settings.SLACK_BOT_TOKEN:
                client = WebClient(token=settings.SLACK_BOT_TOKEN)
                try:
                    _ = client.chat_postMessage(
                        channel="#trackers",
                        text=message,
                    )
                except SlackApiError as e:
                    print(f"Got an error: {e.response['error']}")
        return
    except Exception as e:
        # Print logs if in DEBUG mode
        if settings.DEBUG:
            print(e)
        capture_exception(e)
        return
@@ -3,8 +3,11 @@ import logging

 # Third party imports
 from celery import shared_task
+from slack_sdk import WebClient
+from slack_sdk.errors import SlackApiError

 # Django imports
 from django.conf import settings
 from django.core.mail import EmailMultiAlternatives, get_connection
 from django.template.loader import render_to_string
 from django.utils.html import strip_tags
@@ -15,6 +18,18 @@ from plane.license.utils.instance_value import get_email_configuration
 from plane.utils.exception_logger import log_exception


+def push_updated_to_slack(workspace, workspace_member_invite):
+    # Send message on slack as well
+    client = WebClient(token=settings.SLACK_BOT_TOKEN)
+    try:
+        _ = client.chat_postMessage(
+            channel="#trackers",
+            text=f"{workspace_member_invite.email} has been invited to {workspace.name} as a {workspace_member_invite.role}",
+        )
+    except SlackApiError as e:
+        print(f"Got an error: {e.response['error']}")
+
+
 @shared_task
 def workspace_invitation(email, workspace_id, token, current_site, invitor):
     try:
@@ -80,6 +95,10 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor):
         msg.send()
         logging.getLogger("plane").info("Email sent successfully")

+        # Send message on slack as well
+        if settings.SLACK_BOT_TOKEN:
+            push_updated_to_slack(workspace, workspace_member_invite)
+
         return
     except (Workspace.DoesNotExist, WorkspaceMemberInvite.DoesNotExist) as e:
         log_exception(e)
@@ -36,6 +36,10 @@ app.conf.beat_schedule = {
         "task": "plane.bgtasks.api_logs_task.delete_api_logs",
         "schedule": crontab(hour=0, minute=0),
     },
+    "check-every-12-hr-instance-version": {
+        "task": "plane.license.bgtasks.version_check_task.version_check",
+        "schedule": crontab(hour="*/12", minute=0),
+    },
 }

 # Load task modules from all registered Django app configs.
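The new beat entry schedules version_check twice a day: crontab(hour="*/12", minute=0) matches minute 0 of hours 0 and 12. A quick sanity check of the expression, assuming Celery is installed:

# Illustrative check of the schedule expression added above.
from celery.schedules import crontab

every_12_hours = crontab(hour="*/12", minute=0)  # fires at 00:00 and 12:00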
apiserver/plane/db/management/commands/faker.py (new file, 79 lines)
@@ -0,0 +1,79 @@
# Django imports
from typing import Any
from django.core.management.base import BaseCommand, CommandError

# Module imports
from plane.db.models import User, Workspace, WorkspaceMember


class Command(BaseCommand):
    help = "Create dump issues, cycles etc. for a project in a given workspace"

    def handle(self, *args: Any, **options: Any) -> str | None:

        try:
            workspace_name = input("Workspace Name: ")
            workspace_slug = input("Workspace slug: ")

            if workspace_slug == "":
                raise CommandError("Workspace slug is required")

            if Workspace.objects.filter(slug=workspace_slug).exists():
                raise CommandError("Workspace already exists")

            creator = input("Your email: ")

            if (
                creator == ""
                or not User.objects.filter(email=creator).exists()
            ):
                raise CommandError(
                    "User email is required and should be existing in Database"
                )

            user = User.objects.get(email=creator)

            members = input("Enter Member emails (comma separated): ")
            members = members.split(",") if members != "" else []

            issue_count = int(
                input("Number of issues to be created: ")
            )
            cycle_count = int(
                input("Number of cycles to be created: ")
            )
            module_count = int(
                input("Number of modules to be created: ")
            )

            # Create workspace
            workspace = Workspace.objects.create(
                slug=workspace_slug,
                name=workspace_name,
                owner=user,
            )
            # Create workspace member
            WorkspaceMember.objects.create(
                workspace=workspace, role=20, member=user
            )

            from plane.bgtasks.create_faker import create_fake_data

            create_fake_data.delay(
                slug=workspace_slug,
                email=creator,
                members=members,
                issue_count=issue_count,
                cycle_count=cycle_count,
                module_count=module_count,
            )

            self.stdout.write(
                self.style.SUCCESS("Data is pushed to the queue")
            )
            return
        except Exception as e:
            self.stdout.write(
                self.style.ERROR(f"Command errored out {str(e)}")
            )
            return
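As a standard Django management command, faker is normally run as `python manage.py faker` and answered interactively; it can also be driven programmatically. A sketch, assuming the app is on INSTALLED_APPS so the command is discoverable:

# Illustrative programmatic invocation; the input() prompts still read from stdin.
from django.core.management import call_command

call_command("faker")  # prompts for workspace name, slug, creator email, and counts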
apiserver/plane/db/management/commands/license_check.py (new file, 75 lines)
@@ -0,0 +1,75 @@
# Python imports
import os
import requests
from requests.exceptions import RequestException

# Django imports
from django.core.management.base import BaseCommand, CommandError


class Command(BaseCommand):
    help = "Check the license of the instance with Prime Server"

    def handle(self, *args, **options):
        try:
            # Verify the license key
            prime_host = os.environ.get("PRIME_HOST", False)
            machine_signature = os.environ.get("MACHINE_SIGNATURE", False)
            license_key = os.environ.get("LICENSE_KEY", False)
            deploy_platform = os.environ.get("DEPLOY_PLATFORM", False)
            domain = os.environ.get("LICENSE_DOMAIN", False)
            license_version = os.environ.get("LICENSE_VERSION", False)

            # If any of the above is not provided, raise a command error
            if not prime_host or not machine_signature or not license_key:
                raise CommandError("Invalid license key provided")

            # Check with the license server
            response = requests.post(
                f"{prime_host}/api/validate/",
                headers={
                    "Content-Type": "application/json",
                    "X-Api-Key": str(license_key),
                    "X-Machine-Signature": str(machine_signature),
                },
                json={
                    "machine_signature": str(machine_signature),
                    "domain": domain,
                },
            )

            # Check if status code is 204
            if response.status_code == 204:
                self.stdout.write(
                    self.style.SUCCESS("License key verified successfully")
                )
                return

            elif response.status_code == 400:
                if deploy_platform == "KUBERNETES":
                    response = requests.post(
                        f"{prime_host}/api/kubernetes-setup/",
                        headers={
                            "Content-Type": "application/json",
                            "X-Api-Key": str(license_key),
                            "X-Machine-Signature": str(machine_signature),
                        },
                        json={
                            "machine_signature": str(machine_signature),
                            "domain": domain,
                            "version": license_version,
                        },
                    )
                    response.raise_for_status()
                    self.stdout.write(
                        self.style.SUCCESS("Instance created successfully")
                    )

                    return
                else:
                    raise CommandError("Instance does not exist")
            else:
                raise CommandError("Invalid license key provided")

        except RequestException:
            raise CommandError("Could not verify the license key")
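Because the command reads everything from the environment, the relevant variables must be set before it runs. A hedged sketch of an invocation; all values are placeholders, not real credentials:

# Illustrative only; these values are placeholders.
import os
from django.core.management import call_command

os.environ["PRIME_HOST"] = "https://prime.example.com"
os.environ["MACHINE_SIGNATURE"] = "example-machine-signature"
os.environ["LICENSE_KEY"] = "example-license-key"

call_command("license_check")  # raises CommandError on any validation failure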
@@ -80,7 +80,7 @@ from .workspace import (

 from .importer import Importer

-from .page import Page, PageLog, PageFavorite, PageLabel
+from .page import Page, PageLog, PageFavorite, PageLabel, ProjectPage, TeamPage

 from .estimate import Estimate, EstimatePoint
@@ -93,7 +93,9 @@ class PageLog(BaseModel):
         verbose_name="Transaction Type",
     )
     workspace = models.ForeignKey(
-        "db.Workspace", on_delete=models.CASCADE, related_name="workspace_page_log"
+        "db.Workspace",
+        on_delete=models.CASCADE,
+        related_name="workspace_page_log",
     )

     class Meta:
@@ -15,9 +15,15 @@ from django.db import models
 from django.db.models.signals import post_save
 from django.dispatch import receiver
 from django.utils import timezone
+from django.conf import settings
+
+# Third party imports
+from sentry_sdk import capture_exception
+from slack_sdk import WebClient
+from slack_sdk.errors import SlackApiError

 # Module imports
-from ..mixins import TimeAuditModel
+from plane.db.mixins import TimeAuditModel


 def get_default_onboarding():
@@ -215,3 +221,23 @@ def create_user_notification(sender, instance, created, **kwargs):
         mention=False,
         issue_completed=False,
     )
+
+
+@receiver(post_save, sender=User)
+def send_welcome_slack(sender, instance, created, **kwargs):
+    try:
+        if created and not instance.is_bot:
+            # Send message on slack as well
+            if settings.SLACK_BOT_TOKEN:
+                client = WebClient(token=settings.SLACK_BOT_TOKEN)
+                try:
+                    _ = client.chat_postMessage(
+                        channel="#trackers",
+                        text=f"New user {instance.email} has signed up and begun the onboarding journey.",
+                    )
+                except SlackApiError as e:
+                    print(f"Got an error: {e.response['error']}")
+        return
+    except Exception as e:
+        capture_exception(e)
+        return
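Note that this receiver performs the Slack call inline on post_save, which overlaps with the new plane.bgtasks.user_welcome_task above and ties the request thread to the Slack API. A sketch of an alternative (not what this commit does) that delegates to the Celery task instead:

# Sketch only: offload the Slack notification to the queued task.
@receiver(post_save, sender=User)
def send_welcome_slack_async(sender, instance, created, **kwargs):
    if created and not instance.is_bot:
        from plane.bgtasks.user_welcome_task import send_welcome_slack as slack_task
        slack_task.delay(
            str(instance.id),
            created,
            f"New user {instance.email} has signed up and begun the onboarding journey.",
        )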
apiserver/plane/db/mongodb.py (new file, 23 lines)
@@ -0,0 +1,23 @@
from pymongo import MongoClient

def singleton(cls):
    instances = {}

    def wrapper(*args, **kwargs):
        if cls not in instances:
            instances[cls] = cls(*args, **kwargs)
        return instances[cls]

    return wrapper

@singleton
class Database:
    db = None
    client = None

    def __init__(self, mongo_uri, database_name):
        self.client = MongoClient(mongo_uri)
        self.db = self.client[database_name]

    def get_db(self):
        return self.db
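Because the @singleton wrapper keys its cache on the class, every call to Database() after the first returns the original instance, even with different arguments. A minimal usage sketch; the URIs and database names are placeholders:

# Illustrative usage; URIs and database names are placeholders.
from plane.db.mongodb import Database

db = Database("mongodb://localhost:27017/", "plane").get_db()
same_db = Database("mongodb://another-host:27017/", "other").get_db()
assert db is same_db  # later arguments are ignored once the singleton exists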
apiserver/plane/ee/README.md (new file, 1 line)
@@ -0,0 +1 @@
# Enterprise Edition APIs, Models etc.
apiserver/plane/ee/__init__.py (new, empty file)
apiserver/plane/ee/apps.py (new file, 5 lines)
@@ -0,0 +1,5 @@
from django.apps import AppConfig


class EnterpriseConfig(AppConfig):
    name = "plane.ee"
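An AppConfig only takes effect once the app is registered in Django settings; presumably the settings module lists the new app roughly like this (a sketch, since the actual registration is not shown in this diff):

# Hypothetical settings excerpt; not part of this diff.
INSTALLED_APPS = [
    # ... existing apps ...
    "plane.ee",
]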
Some files were not shown because too many files have changed in this diff.