mirror of
https://github.com/makeplane/plane
synced 2025-08-07 19:59:33 +00:00
Merge branch 'preview' into sync/ce-ee
@@ -19,6 +19,7 @@ AWS_S3_BUCKET_NAME="uploads"
FILE_SIZE_LIMIT=5242880

# GPT settings
SILO_BASE_URL=
OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
OPENAI_API_KEY="sk-" # deprecated
GPT_ENGINE="gpt-3.5-turbo" # deprecated
@@ -31,3 +32,8 @@ USE_MINIO=1

# Nginx Configuration
NGINX_PORT=80

# Imports Config
SILO_BASE_URL=

MONGO_DB_URL="mongodb://plane-mongodb:27017/"
11 .github/dependabot.yml vendored Normal file
@@ -0,0 +1,11 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file

version: 2
updates:
  - package-ecosystem: "" # See documentation for possible values
    directory: "/" # Location of package manifests
    schedule:
      interval: "weekly"
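The committed template leaves package-ecosystem blank. A minimal sketch of a filled-in config, assuming (hypothetically) that Dependabot should watch the yarn workspace at the repo root (the "npm" ecosystem value also covers yarn manifests):

version: 2
updates:
  - package-ecosystem: "npm" # hypothetical value; "npm" covers package.json/yarn.lock
    directory: "/" # repo-root manifests
    schedule:
      interval: "weekly"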
204 .github/workflows/build-aio-branch-ee.yml vendored Normal file
@@ -0,0 +1,204 @@
name: Branch Build AIO

on:
  workflow_dispatch:
    inputs:
      full:
        description: 'Run full build'
        type: boolean
        required: false
        default: true
      slim:
        description: 'Run slim build'
        type: boolean
        required: false
        default: true
      base_tag_name:
        description: 'Base Tag Name'
        required: false
        default: ''
  release:
    types: [released, prereleased]

env:
  TARGET_BRANCH: ${{ github.ref_name || github.event.release.target_commitish }}
  IS_PRERELEASE: ${{ github.event.release.prerelease }}
  FULL_BUILD_INPUT: ${{ github.event.inputs.full }}
  SLIM_BUILD_INPUT: ${{ github.event.inputs.slim }}

jobs:
  branch_build_setup:
    name: Build Setup
    runs-on: ubuntu-latest
    outputs:
      gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
      gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
      gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
      gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
      gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
      aio_base_tag: ${{ steps.set_env_variables.outputs.AIO_BASE_TAG }}
      do_full_build: ${{ steps.set_env_variables.outputs.DO_FULL_BUILD }}
      do_slim_build: ${{ steps.set_env_variables.outputs.DO_SLIM_BUILD }}

    steps:
      - id: set_env_variables
        name: Set Environment Variables
        run: |
          if [ "${{ github.event_name }}" == "release" ] && [ "${{ env.IS_PRERELEASE }}" != "true" ]; then
            echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
            echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
            echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
            echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT

            echo "AIO_BASE_TAG=latest" >> $GITHUB_OUTPUT
          else
            echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
            echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
            echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
            echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT

            if [ "${{ github.event.inputs.base_tag_name }}" != "" ]; then
              echo "AIO_BASE_TAG=${{ github.event.inputs.base_tag_name }}" >> $GITHUB_OUTPUT
            elif [ "${{ env.TARGET_BRANCH }}" == "preview" ]; then
              echo "AIO_BASE_TAG=preview" >> $GITHUB_OUTPUT
            else
              echo "AIO_BASE_TAG=develop" >> $GITHUB_OUTPUT
            fi
          fi
          echo "TARGET_BRANCH=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT

          if [ "${{ env.FULL_BUILD_INPUT }}" == "true" ] || [ "${{ github.event_name }}" == "push" ] || [ "${{ github.event_name }}" == "release" ]; then
            echo "DO_FULL_BUILD=true" >> $GITHUB_OUTPUT
          else
            echo "DO_FULL_BUILD=false" >> $GITHUB_OUTPUT
          fi

          if [ "${{ env.SLIM_BUILD_INPUT }}" == "true" ] || [ "${{ github.event_name }}" == "push" ] || [ "${{ github.event_name }}" == "release" ]; then
            echo "DO_SLIM_BUILD=true" >> $GITHUB_OUTPUT
          else
            echo "DO_SLIM_BUILD=false" >> $GITHUB_OUTPUT
          fi

      - id: checkout_files
        name: Checkout Files
        uses: actions/checkout@v4

  full_build_push:
    if: ${{ needs.branch_build_setup.outputs.do_full_build == 'true' }}
    runs-on: ubuntu-20.04
    needs: [branch_build_setup]
    env:
      BUILD_TYPE: full
      AIO_BASE_TAG: ${{ needs.branch_build_setup.outputs.aio_base_tag }}
      AIO_IMAGE_TAGS: makeplane/plane-aio-enterprise:full-${{ needs.branch_build_setup.outputs.gh_branch_name }}
      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
    steps:
      - name: Set Docker Tag
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            TAG=makeplane/plane-aio-enterprise:${{ env.BUILD_TYPE }}-stable,makeplane/plane-aio-enterprise:${{ env.BUILD_TYPE }}-${{ github.event.release.tag_name }}
          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            TAG=makeplane/plane-aio-enterprise:${{ env.BUILD_TYPE }}-latest
          else
            TAG=${{ env.AIO_IMAGE_TAGS }}
          fi
          echo "AIO_IMAGE_TAGS=${TAG}" >> $GITHUB_ENV

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: ${{ env.BUILDX_DRIVER }}
          version: ${{ env.BUILDX_VERSION }}
          endpoint: ${{ env.BUILDX_ENDPOINT }}

      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Build and Push to Docker Hub
        uses: docker/build-push-action@v5.1.0
        with:
          context: .
          file: ./aio/Dockerfile-app
          platforms: ${{ env.BUILDX_PLATFORMS }}
          tags: ${{ env.AIO_IMAGE_TAGS }}
          push: true
          build-args: |
            BUILD_TAG=${{ env.AIO_BASE_TAG }}
            BUILD_TYPE=${{ env.BUILD_TYPE }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
        env:
          DOCKER_BUILDKIT: 1
          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

  slim_build_push:
    if: ${{ needs.branch_build_setup.outputs.do_slim_build == 'true' }}
    runs-on: ubuntu-20.04
    needs: [branch_build_setup]
    env:
      BUILD_TYPE: slim
      AIO_BASE_TAG: ${{ needs.branch_build_setup.outputs.aio_base_tag }}
      AIO_IMAGE_TAGS: makeplane/plane-aio-enterprise:slim-${{ needs.branch_build_setup.outputs.gh_branch_name }}
      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
    steps:
      - name: Set Docker Tag
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            TAG=makeplane/plane-aio-enterprise:${{ env.BUILD_TYPE }}-stable,makeplane/plane-aio-enterprise:${{ env.BUILD_TYPE }}-${{ github.event.release.tag_name }}
          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            TAG=makeplane/plane-aio-enterprise:${{ env.BUILD_TYPE }}-latest
          else
            TAG=${{ env.AIO_IMAGE_TAGS }}
          fi
          echo "AIO_IMAGE_TAGS=${TAG}" >> $GITHUB_ENV

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: ${{ env.BUILDX_DRIVER }}
          version: ${{ env.BUILDX_VERSION }}
          endpoint: ${{ env.BUILDX_ENDPOINT }}

      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Build and Push to Docker Hub
        uses: docker/build-push-action@v5.1.0
        with:
          context: .
          file: ./aio/Dockerfile-app
          platforms: ${{ env.BUILDX_PLATFORMS }}
          tags: ${{ env.AIO_IMAGE_TAGS }}
          push: true
          build-args: |
            BUILD_TAG=${{ env.AIO_BASE_TAG }}
            BUILD_TYPE=${{ env.BUILD_TYPE }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
        env:
          DOCKER_BUILDKIT: 1
          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
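Note the string comparisons against "true" in the guards above: values reached through ${{ github.event.inputs.* }} arrive in the shell as plain strings even for boolean-typed workflow_dispatch inputs, so the test is textual. A minimal sketch of the same guard outside Actions, with a hypothetical INPUT variable:

INPUT="true" # what ${{ env.FULL_BUILD_INPUT }} expands to when the box is checked
if [ "$INPUT" == "true" ]; then
  echo "DO_FULL_BUILD=true"
else
  echo "DO_FULL_BUILD=false"
fi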
358 .github/workflows/build-branch-cloud.yml vendored Normal file
@@ -0,0 +1,358 @@
name: Branch Build Enterprise Cloud

on:
  workflow_dispatch:
  release:
    types: [released, prereleased]

env:
  TARGET_BRANCH: ${{ github.ref_name || github.event.release.target_commitish }}
  VAULT_KP_PREFIX: plane-ee-cloud-builds

jobs:
  branch_build_setup:
    name: Build Setup
    runs-on: ubuntu-22.04
    outputs:
      gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
      flat_branch_name: ${{ steps.set_env_variables.outputs.FLAT_BRANCH_NAME }}
      gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
      gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
      gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
      gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}

    steps:
      - id: set_env_variables
        name: Set Environment Variables
        run: |
          echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
          echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
          echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
          echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT

          FLAT_BRANCH_NAME=$(echo "${{ env.TARGET_BRANCH }}" | sed 's/[^a-zA-Z0-9]/-/g')
          echo "FLAT_BRANCH_NAME=$FLAT_BRANCH_NAME" >> $GITHUB_OUTPUT

      - id: checkout_files
        name: Checkout Files
        uses: actions/checkout@v4

      - name: Generate Keypair
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            ssh-keygen -t ed25519 -m PEM -f monitor/prime.key -N ""
            echo "-----------------"
            echo ""
            cat monitor/prime.key.pub
            echo ""
            echo "-----------------"
          else
            echo "${{ secrets.DEFAULT_PRIME_PRIVATE_KEY }}" > monitor/prime.key
          fi

  branch_build_push_admin:
    name: Build-Push Admin Docker Image
    runs-on: ${{ vars.ACTION_RUNS_ON }}
    needs: [branch_build_setup]
    env:
      ADMIN_CLOUD_TAG: makeplane/admin-enterprise-cloud:${{ needs.branch_build_setup.outputs.flat_branch_name }}
      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
    steps:
      - name: Set Admin Docker Tag
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            CLOUD_TAG=makeplane/admin-enterprise-cloud:stable
            CLOUD_TAG=${CLOUD_TAG},makeplane/admin-enterprise-cloud:${{ github.event.release.tag_name }}
          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            CLOUD_TAG=makeplane/admin-enterprise-cloud:latest
          else
            CLOUD_TAG=${{ env.ADMIN_CLOUD_TAG }}
          fi
          echo "ADMIN_CLOUD_TAG=${CLOUD_TAG}" >> $GITHUB_ENV

      - name: Tailscale
        uses: tailscale/github-action@v2
        with:
          oauth-client-id: ${{ secrets.TAILSCALE_OAUTH_CLIENT_ID }}
          oauth-secret: ${{ secrets.TAILSCALE_OAUTH_SECRET }}
          tags: tag:ci

      - name: Get the ENV values from Vault
        run: |
          if [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            ENV_NAME="prod"
          else
            ENV_NAME="stage"
          fi

          curl -fsSL \
            --header "X-Vault-Token: ${{ secrets.VAULT_TOKEN }}" \
            --request GET \
            ${{ vars.VAULT_HOST }}/v1/kv/git-builds/data/${{ env.VAULT_KP_PREFIX }}-${ENV_NAME} | jq .data.data > values.json

          jq -r 'to_entries|map("\(.key)=\(.value|tostring)")|.[]' values.json >> $GITHUB_ENV
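The jq one-liner above flattens the Vault secret object into KEY=value lines that later steps read back from $GITHUB_ENV. A minimal local sketch of the same transform, using a hypothetical values.json:

echo '{"NEXT_PUBLIC_API_BASE_URL":"https://api.example.com","NGINX_PORT":80}' > values.json
# to_entries turns the object into {key, value} pairs; map renders each pair as KEY=value;
# .[] streams the strings one per line, and -r emits them raw (unquoted);
# tostring covers non-string values such as the port number
jq -r 'to_entries|map("\(.key)=\(.value|tostring)")|.[]' values.json
# prints:
#   NEXT_PUBLIC_API_BASE_URL=https://api.example.com
#   NGINX_PORT=80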
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: ${{ env.BUILDX_DRIVER }}
          version: ${{ env.BUILDX_VERSION }}
          endpoint: ${{ env.BUILDX_ENDPOINT }}

      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Build and Push Admin-Cloud to Docker Container Registry
        uses: docker/build-push-action@v5.1.0
        with:
          context: .
          file: ./admin/Dockerfile.admin
          platforms: ${{ env.BUILDX_PLATFORMS }}
          tags: ${{ env.ADMIN_CLOUD_TAG }}
          push: true
          build-args: |
            NEXT_PUBLIC_WEB_BASE_URL=${{ env.NEXT_PUBLIC_WEB_BASE_URL }}
            NEXT_PUBLIC_SPACE_BASE_URL=${{ env.NEXT_PUBLIC_SPACE_BASE_URL }}
            NEXT_PUBLIC_ADMIN_BASE_URL=${{ env.NEXT_PUBLIC_ADMIN_BASE_URL }}
            NEXT_PUBLIC_API_BASE_URL=${{ env.NEXT_PUBLIC_API_BASE_URL }}
            NEXT_PUBLIC_ADMIN_BASE_PATH=${{ env.NEXT_PUBLIC_ADMIN_BASE_PATH }}
            NEXT_PUBLIC_SPACE_BASE_PATH=${{ env.NEXT_PUBLIC_SPACE_BASE_PATH }}
            NEXT_PUBLIC_API_BASE_PATH=${{ env.NEXT_PUBLIC_API_BASE_PATH }}
        env:
          DOCKER_BUILDKIT: 1
          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

  branch_build_push_web:
    name: Build-Push Web Docker Image
    runs-on: ${{ vars.ACTION_RUNS_ON }}
    needs: [branch_build_setup]
    env:
      WEB_CLOUD_IMAGE: makeplane/web-enterprise-cloud
      WEB_CLOUD_TAG: makeplane/web-enterprise-cloud:${{ needs.branch_build_setup.outputs.flat_branch_name }}
      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
    steps:
      - name: Set Web Docker Tag
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            CLOUD_TAG=${{ env.WEB_CLOUD_IMAGE }}:stable
            CLOUD_TAG=${CLOUD_TAG},${{ env.WEB_CLOUD_IMAGE }}:${{ github.event.release.tag_name }}
          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            CLOUD_TAG=${{ env.WEB_CLOUD_IMAGE }}:latest
          else
            CLOUD_TAG=${{ env.WEB_CLOUD_TAG }}
          fi
          echo "WEB_CLOUD_TAG=${CLOUD_TAG}" >> $GITHUB_ENV

      - name: Tailscale
        uses: tailscale/github-action@v2
        with:
          oauth-client-id: ${{ secrets.TAILSCALE_OAUTH_CLIENT_ID }}
          oauth-secret: ${{ secrets.TAILSCALE_OAUTH_SECRET }}
          tags: tag:ci

      - name: Get the ENV values from Vault
        run: |
          if [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            ENV_NAME="prod"
          else
            ENV_NAME="stage"
          fi

          curl -fsSL \
            --header "X-Vault-Token: ${{ secrets.VAULT_TOKEN }}" \
            --request GET \
            ${{ vars.VAULT_HOST }}/v1/kv/git-builds/data/${{ env.VAULT_KP_PREFIX }}-${ENV_NAME} | jq .data.data > values.json

          jq -r 'to_entries|map("\(.key)=\(.value|tostring)")|.[]' values.json >> $GITHUB_ENV

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: ${{ env.BUILDX_DRIVER }}
          version: ${{ env.BUILDX_VERSION }}
          endpoint: ${{ env.BUILDX_ENDPOINT }}

      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Build and Push Web Cloud to Docker Container Registry
        uses: docker/build-push-action@v5.1.0
        with:
          context: .
          file: ./web/Dockerfile.web
          platforms: ${{ env.BUILDX_PLATFORMS }}
          tags: ${{ env.WEB_CLOUD_TAG }}
          push: true
          build-args: |
            NEXT_PUBLIC_WEB_BASE_URL=${{ env.NEXT_PUBLIC_WEB_BASE_URL }}
            NEXT_PUBLIC_SPACE_BASE_URL=${{ env.NEXT_PUBLIC_SPACE_BASE_URL }}
            NEXT_PUBLIC_ADMIN_BASE_URL=${{ env.NEXT_PUBLIC_ADMIN_BASE_URL }}
            NEXT_PUBLIC_API_BASE_URL=${{ env.NEXT_PUBLIC_API_BASE_URL }}
            NEXT_PUBLIC_ADMIN_BASE_PATH=${{ env.NEXT_PUBLIC_ADMIN_BASE_PATH }}
            NEXT_PUBLIC_SPACE_BASE_PATH=${{ env.NEXT_PUBLIC_SPACE_BASE_PATH }}
            NEXT_PUBLIC_API_BASE_PATH=${{ env.NEXT_PUBLIC_API_BASE_PATH }}
        env:
          DOCKER_BUILDKIT: 1
          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

  branch_build_push_space:
    name: Build-Push Space Docker Image
    runs-on: ${{ vars.ACTION_RUNS_ON }}
    needs: [branch_build_setup]
    env:
      SPACE_CLOUD_IMAGE: makeplane/space-enterprise-cloud
      SPACE_CLOUD_TAG: makeplane/space-enterprise-cloud:${{ needs.branch_build_setup.outputs.flat_branch_name }}
      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.flat_branch_name }}
      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
    steps:
      - name: Set Space Docker Tag
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            CLOUD_TAG=${{ env.SPACE_CLOUD_IMAGE }}:stable
            CLOUD_TAG=${CLOUD_TAG},${{ env.SPACE_CLOUD_IMAGE }}:${{ github.event.release.tag_name }}
          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            CLOUD_TAG=${{ env.SPACE_CLOUD_IMAGE }}:latest
          else
            CLOUD_TAG=${{ env.SPACE_CLOUD_TAG }}
          fi
          echo "SPACE_CLOUD_TAG=${CLOUD_TAG}" >> $GITHUB_ENV

      - name: Tailscale
        uses: tailscale/github-action@v2
        with:
          oauth-client-id: ${{ secrets.TAILSCALE_OAUTH_CLIENT_ID }}
          oauth-secret: ${{ secrets.TAILSCALE_OAUTH_SECRET }}
          tags: tag:ci

      - name: Get the ENV values from Vault
        run: |
          if [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            ENV_NAME="prod"
          else
            ENV_NAME="stage"
          fi

          curl -fsSL \
            --header "X-Vault-Token: ${{ secrets.VAULT_TOKEN }}" \
            --request GET \
            ${{ vars.VAULT_HOST }}/v1/kv/git-builds/data/${{ env.VAULT_KP_PREFIX }}-${ENV_NAME} | jq .data.data > values.json

          jq -r 'to_entries|map("\(.key)=\(.value|tostring)")|.[]' values.json >> $GITHUB_ENV

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: ${{ env.BUILDX_DRIVER }}
          version: ${{ env.BUILDX_VERSION }}
          endpoint: ${{ env.BUILDX_ENDPOINT }}

      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Build and Push Space-Cloud to Docker Container Registry
        uses: docker/build-push-action@v5.1.0
        with:
          context: .
          file: ./space/Dockerfile.space
          platforms: ${{ env.BUILDX_PLATFORMS }}
          tags: ${{ env.SPACE_CLOUD_TAG }}
          push: true
          build-args: |
            NEXT_PUBLIC_WEB_BASE_URL=${{ env.NEXT_PUBLIC_WEB_BASE_URL }}
            NEXT_PUBLIC_SPACE_BASE_URL=${{ env.NEXT_PUBLIC_SPACE_BASE_URL }}
            NEXT_PUBLIC_ADMIN_BASE_URL=${{ env.NEXT_PUBLIC_ADMIN_BASE_URL }}
            NEXT_PUBLIC_API_BASE_URL=${{ env.NEXT_PUBLIC_API_BASE_URL }}
            NEXT_PUBLIC_ADMIN_BASE_PATH=${{ env.NEXT_PUBLIC_ADMIN_BASE_PATH }}
            NEXT_PUBLIC_SPACE_BASE_PATH=${{ env.NEXT_PUBLIC_SPACE_BASE_PATH }}
            NEXT_PUBLIC_API_BASE_PATH=${{ env.NEXT_PUBLIC_API_BASE_PATH }}
        env:
          DOCKER_BUILDKIT: 1
          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

  branch_build_push_apiserver:
    name: Build-Push API Server Docker Image
    runs-on: ${{ vars.ACTION_RUNS_ON }}
    needs: [branch_build_setup]
    env:
      BACKEND_CLOUD_IMAGE: makeplane/backend-enterprise-cloud
      BACKEND_CLOUD_TAG: makeplane/backend-enterprise-cloud:${{ needs.branch_build_setup.outputs.flat_branch_name }}
      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
    steps:
      - name: Set Backend Docker Tag
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            CLOUD_TAG=${{ env.BACKEND_CLOUD_IMAGE }}:stable
            CLOUD_TAG=${CLOUD_TAG},${{ env.BACKEND_CLOUD_IMAGE }}:${{ github.event.release.tag_name }}
          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            CLOUD_TAG=${{ env.BACKEND_CLOUD_IMAGE }}:latest
          else
            CLOUD_TAG=${{ env.BACKEND_CLOUD_TAG }}
          fi
          echo "BACKEND_CLOUD_TAG=${CLOUD_TAG}" >> $GITHUB_ENV

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: ${{ env.BUILDX_DRIVER }}
          version: ${{ env.BUILDX_VERSION }}
          endpoint: ${{ env.BUILDX_ENDPOINT }}

      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Build and Push Backend-Cloud to Docker Hub
        uses: docker/build-push-action@v5.1.0
        with:
          context: ./apiserver
          file: ./apiserver/Dockerfile.api
          platforms: ${{ env.BUILDX_PLATFORMS }}
          push: true
          tags: ${{ env.BACKEND_CLOUD_TAG }}
        env:
          DOCKER_BUILDKIT: 1
          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
548 .github/workflows/build-branch-ee.yml vendored Normal file
@@ -0,0 +1,548 @@
name: Branch Build Enterprise

on:
  workflow_dispatch:
    inputs:
      arm64:
        description: "Build for ARM64 architecture"
        required: false
        default: false
        type: boolean
  push:
    branches:
      - master
      - preview
  release:
    types: [released, prereleased]

env:
  TARGET_BRANCH: ${{ github.ref_name || github.event.release.target_commitish }}
  ARM64_BUILD: ${{ github.event.inputs.arm64 }}
  IS_PRERELEASE: ${{ github.event.release.prerelease }}

jobs:
  branch_build_setup:
    name: Build Setup
    runs-on: ${{ vars.ACTION_RUNS_ON }}
    outputs:
      gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
      gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
      gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
      gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
      gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
      build_web: ${{ steps.changed_files.outputs.web_any_changed }}
      build_admin: ${{ steps.changed_files.outputs.admin_any_changed }}
      build_space: ${{ steps.changed_files.outputs.space_any_changed }}
      build_apiserver: ${{ steps.changed_files.outputs.apiserver_any_changed }}
      build_proxy: ${{ steps.changed_files.outputs.proxy_any_changed }}
      build_monitor: ${{ steps.changed_files.outputs.monitor_any_changed }}
      artifact_upload_to_s3: ${{ steps.set_env_variables.outputs.artifact_upload_to_s3 }}
      artifact_s3_suffix: ${{ steps.set_env_variables.outputs.artifact_s3_suffix }}

    steps:
      - id: set_env_variables
        name: Set Environment Variables
        run: |
          if [ "${{ env.ARM64_BUILD }}" == "true" ] || ([ "${{ github.event_name }}" == "release" ] && [ "${{ env.IS_PRERELEASE }}" != "true" ]); then
            echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
            echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
            echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
            echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT
          else
            echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
            echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
            echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
            echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
          fi
          BR_NAME=$(echo "${{ env.TARGET_BRANCH }}" | tr / -)
          echo "TARGET_BRANCH=$BR_NAME" >> $GITHUB_OUTPUT

          if [ "${{ github.event_name }}" == "release" ]; then
            echo "artifact_upload_to_s3=true" >> $GITHUB_OUTPUT
            echo "artifact_s3_suffix=${{ github.event.release.tag_name }}" >> $GITHUB_OUTPUT
          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            echo "artifact_upload_to_s3=true" >> $GITHUB_OUTPUT
            echo "artifact_s3_suffix=latest" >> $GITHUB_OUTPUT
          elif [ "${{ env.TARGET_BRANCH }}" == "preview" ] || [ "${{ env.TARGET_BRANCH }}" == "develop" ]; then
            echo "artifact_upload_to_s3=true" >> $GITHUB_OUTPUT
            echo "artifact_s3_suffix=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT
          else
            echo "artifact_upload_to_s3=false" >> $GITHUB_OUTPUT
            echo "artifact_s3_suffix=$BR_NAME" >> $GITHUB_OUTPUT
          fi

      - id: checkout_files
        name: Checkout Files
        uses: actions/checkout@v4

      - name: Get changed files
        id: changed_files
        uses: tj-actions/changed-files@v42
        with:
          files_yaml: |
            apiserver:
              - apiserver/**
            proxy:
              - nginx/**
            admin:
              - admin/**
              - packages/**
              - "package.json"
              - "yarn.lock"
              - "tsconfig.json"
              - "turbo.json"
            space:
              - space/**
              - packages/**
              - "package.json"
              - "yarn.lock"
              - "tsconfig.json"
              - "turbo.json"
            web:
              - web/**
              - packages/**
              - "package.json"
              - "yarn.lock"
              - "tsconfig.json"
              - "turbo.json"
            monitor:
              - monitor/**

      - name: Generate Keypair
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            ssh-keygen -t ed25519 -m PEM -f monitor/prime.key -N ""
            echo "-----------------"
            echo ""
            cat monitor/prime.key.pub
            echo ""
            echo "-----------------"
          else
            echo "${{ secrets.DEFAULT_PRIME_PRIVATE_KEY }}" > monitor/prime.key
          fi

  branch_build_push_admin:
    if: ${{ needs.branch_build_setup.outputs.build_admin == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
    name: Build-Push Admin Docker Image
    runs-on: ${{ vars.ACTION_RUNS_ON }}
    needs: [branch_build_setup]
    env:
      ADMIN_TAG: makeplane/admin-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
    steps:
      - name: Set Admin Docker Tag
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            TAG=makeplane/admin-enterprise:stable
            TAG=${TAG},makeplane/admin-enterprise:${{ github.event.release.tag_name }}
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/admin-enterprise:stable
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/admin-enterprise:${{ github.event.release.tag_name }}
          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            TAG=makeplane/admin-enterprise:latest
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/admin-enterprise:latest
          else
            TAG=${{ env.ADMIN_TAG }}
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/admin-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
          fi
          echo "ADMIN_TAG=${TAG}" >> $GITHUB_ENV

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to Harbor
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.HARBOR_USERNAME }}
          password: ${{ secrets.HARBOR_TOKEN }}
          registry: ${{ vars.HARBOR_REGISTRY }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: ${{ env.BUILDX_DRIVER }}
          version: ${{ env.BUILDX_VERSION }}
          endpoint: ${{ env.BUILDX_ENDPOINT }}

      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Build and Push Frontend to Docker Container Registry
        uses: docker/build-push-action@v5.1.0
        with:
          context: .
          file: ./admin/Dockerfile.admin
          platforms: ${{ env.BUILDX_PLATFORMS }}
          tags: ${{ env.ADMIN_TAG }}
          push: true
        env:
          DOCKER_BUILDKIT: 1
          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

  branch_build_push_web:
    if: ${{ needs.branch_build_setup.outputs.build_web == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
    name: Build-Push Web Docker Image
    runs-on: ${{ vars.ACTION_RUNS_ON }}
    needs: [branch_build_setup]
    env:
      WEB_TAG: makeplane/web-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
    steps:
      - name: Set Web Docker Tag
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            TAG=makeplane/web-enterprise:stable
            TAG=${TAG},makeplane/web-enterprise:${{ github.event.release.tag_name }}
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/web-enterprise:stable
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/web-enterprise:${{ github.event.release.tag_name }}
          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            TAG=makeplane/web-enterprise:latest
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/web-enterprise:latest
          else
            TAG=${{ env.WEB_TAG }}
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/web-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
          fi
          echo "WEB_TAG=${TAG}" >> $GITHUB_ENV

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to Harbor
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.HARBOR_USERNAME }}
          password: ${{ secrets.HARBOR_TOKEN }}
          registry: ${{ vars.HARBOR_REGISTRY }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: ${{ env.BUILDX_DRIVER }}
          version: ${{ env.BUILDX_VERSION }}
          endpoint: ${{ env.BUILDX_ENDPOINT }}

      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Build and Push Web to Docker Container Registry
        uses: docker/build-push-action@v5.1.0
        with:
          context: .
          file: ./web/Dockerfile.web
          platforms: ${{ env.BUILDX_PLATFORMS }}
          tags: ${{ env.WEB_TAG }}
          push: true
        env:
          DOCKER_BUILDKIT: 1
          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

  branch_build_push_space:
    if: ${{ needs.branch_build_setup.outputs.build_space == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
    name: Build-Push Space Docker Image
    runs-on: ${{ vars.ACTION_RUNS_ON }}
    needs: [branch_build_setup]
    env:
      SPACE_TAG: makeplane/space-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
    steps:
      - name: Set Space Docker Tag
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            TAG=makeplane/space-enterprise:stable
            TAG=${TAG},makeplane/space-enterprise:${{ github.event.release.tag_name }}
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/space-enterprise:stable
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/space-enterprise:${{ github.event.release.tag_name }}
          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            TAG=makeplane/space-enterprise:latest
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/space-enterprise:latest
          else
            TAG=${{ env.SPACE_TAG }}
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/space-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
          fi
          echo "SPACE_TAG=${TAG}" >> $GITHUB_ENV

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to Harbor
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.HARBOR_USERNAME }}
          password: ${{ secrets.HARBOR_TOKEN }}
          registry: ${{ vars.HARBOR_REGISTRY }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: ${{ env.BUILDX_DRIVER }}
          version: ${{ env.BUILDX_VERSION }}
          endpoint: ${{ env.BUILDX_ENDPOINT }}

      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Build and Push Space to Docker Hub
        uses: docker/build-push-action@v5.1.0
        with:
          context: .
          file: ./space/Dockerfile.space
          platforms: ${{ env.BUILDX_PLATFORMS }}
          tags: ${{ env.SPACE_TAG }}
          push: true
        env:
          DOCKER_BUILDKIT: 1
          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

  branch_build_push_apiserver:
    if: ${{ needs.branch_build_setup.outputs.build_apiserver == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
    name: Build-Push API Server Docker Image
    runs-on: ${{ vars.ACTION_RUNS_ON }}
    needs: [branch_build_setup]
    env:
      BACKEND_TAG: makeplane/backend-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
    steps:
      - name: Set Backend Docker Tag
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            TAG=makeplane/backend-enterprise:stable
            TAG=${TAG},makeplane/backend-enterprise:${{ github.event.release.tag_name }}
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/backend-enterprise:stable
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/backend-enterprise:${{ github.event.release.tag_name }}
          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            TAG=makeplane/backend-enterprise:latest
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/backend-enterprise:latest
          else
            TAG=${{ env.BACKEND_TAG }}
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/backend-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
          fi
          echo "BACKEND_TAG=${TAG}" >> $GITHUB_ENV

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to Harbor
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.HARBOR_USERNAME }}
          password: ${{ secrets.HARBOR_TOKEN }}
          registry: ${{ vars.HARBOR_REGISTRY }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: ${{ env.BUILDX_DRIVER }}
          version: ${{ env.BUILDX_VERSION }}
          endpoint: ${{ env.BUILDX_ENDPOINT }}

      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Build and Push Backend to Docker Hub
        uses: docker/build-push-action@v5.1.0
        with:
          context: ./apiserver
          file: ./apiserver/Dockerfile.api
          platforms: ${{ env.BUILDX_PLATFORMS }}
          push: true
          tags: ${{ env.BACKEND_TAG }}
        env:
          DOCKER_BUILDKIT: 1
          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

  branch_build_push_proxy:
    if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
    name: Build-Push Proxy Docker Image
    runs-on: ${{ vars.ACTION_RUNS_ON }}
    needs: [branch_build_setup]
    env:
      PROXY_TAG: makeplane/proxy-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
    steps:
      - name: Set Proxy Docker Tag
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            TAG=makeplane/proxy-enterprise:stable
            TAG=${TAG},makeplane/proxy-enterprise:${{ github.event.release.tag_name }}
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/proxy-enterprise:stable
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/proxy-enterprise:${{ github.event.release.tag_name }}
          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            TAG=makeplane/proxy-enterprise:latest
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/proxy-enterprise:latest
          else
            TAG=${{ env.PROXY_TAG }}
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/proxy-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
          fi
          echo "PROXY_TAG=${TAG}" >> $GITHUB_ENV

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to Harbor
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.HARBOR_USERNAME }}
          password: ${{ secrets.HARBOR_TOKEN }}
          registry: ${{ vars.HARBOR_REGISTRY }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: ${{ env.BUILDX_DRIVER }}
          version: ${{ env.BUILDX_VERSION }}
          endpoint: ${{ env.BUILDX_ENDPOINT }}

      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Build and Push Plane-Proxy to Docker Hub
        uses: docker/build-push-action@v5.1.0
        with:
          context: ./nginx
          file: ./nginx/Dockerfile
          platforms: ${{ env.BUILDX_PLATFORMS }}
          tags: ${{ env.PROXY_TAG }}
          push: true
        env:
          DOCKER_BUILDKIT: 1
          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

  branch_build_push_monitor:
    if: ${{ needs.branch_build_setup.outputs.build_monitor == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
    name: Build-Push Monitor Docker Image
    runs-on: ${{ vars.ACTION_RUNS_ON }}
    needs: [branch_build_setup]
    env:
      MONITOR_TAG: makeplane/monitor-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
    steps:
      - name: Set Monitor Docker Tag
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            TAG=makeplane/monitor-enterprise:stable
            TAG=${TAG},makeplane/monitor-enterprise:${{ github.event.release.tag_name }}
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/monitor-enterprise:stable
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/monitor-enterprise:${{ github.event.release.tag_name }}
          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            TAG=makeplane/monitor-enterprise:latest
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/monitor-enterprise:latest
          else
            TAG=${{ env.MONITOR_TAG }}
            TAG=${TAG},${{ vars.HARBOR_REGISTRY }}/${{ vars.HARBOR_PROJECT }}/monitor-enterprise:${{ needs.branch_build_setup.outputs.gh_branch_name }}
          fi
          echo "MONITOR_TAG=${TAG}" >> $GITHUB_ENV

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to Harbor
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.HARBOR_USERNAME }}
          password: ${{ secrets.HARBOR_TOKEN }}
          registry: ${{ vars.HARBOR_REGISTRY }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: ${{ env.BUILDX_DRIVER }}
          version: ${{ env.BUILDX_VERSION }}
          endpoint: ${{ env.BUILDX_ENDPOINT }}

      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Build and Push Monitor to Docker Container Registry
        uses: docker/build-push-action@v5.1.0
        with:
          context: ./monitor
          file: ./monitor/Dockerfile
          platforms: ${{ env.BUILDX_PLATFORMS }}
          tags: ${{ env.MONITOR_TAG }}
          push: true
        env:
          DOCKER_BUILDKIT: 1
          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

  upload_artifacts_s3:
    if: ${{ needs.branch_build_setup.outputs.artifact_upload_to_s3 == 'true' }}
    name: Upload artifacts to S3 Bucket
    runs-on: ${{ vars.ACTION_RUNS_ON }}
    needs: [branch_build_setup]
    container:
      image: docker:20.10.7
      credentials:
        username: ${{ secrets.DOCKERHUB_USERNAME }}
        password: ${{ secrets.DOCKERHUB_TOKEN }}
    env:
      ARTIFACT_SUFFIX: ${{ needs.branch_build_setup.outputs.artifact_s3_suffix }}
      AWS_ACCESS_KEY_ID: ${{ secrets.SELF_HOST_BUCKET_ACCESS_KEY }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.SELF_HOST_BUCKET_SECRET_KEY }}
      TARGET_BRANCH: ${{ github.ref_name || github.event.release.target_commitish }}
    steps:
      - id: checkout_files
        name: Checkout Files
        uses: actions/checkout@v4

      - name: Upload artifacts
        run: |
          apk update
          apk add --no-cache aws-cli

          mkdir -p ~/${{ env.ARTIFACT_SUFFIX }}

          cp deploy/cli-install/variables.env ~/${{ env.ARTIFACT_SUFFIX }}/variables.env
          cp deploy/cli-install/Caddyfile ~/${{ env.ARTIFACT_SUFFIX }}/Caddyfile
          sed -e 's@${APP_RELEASE_VERSION}@'${{ env.ARTIFACT_SUFFIX }}'@' deploy/cli-install/docker-compose.yml > ~/${{ env.ARTIFACT_SUFFIX }}/docker-compose.yml
          sed -e 's@${APP_RELEASE_VERSION}@'${{ env.ARTIFACT_SUFFIX }}'@' deploy/cli-install/docker-compose-caddy.yml > ~/${{ env.ARTIFACT_SUFFIX }}/docker-compose-caddy.yml

          aws s3 cp ~/${{ env.ARTIFACT_SUFFIX }} s3://${{ vars.SELF_HOST_BUCKET_NAME }}/plane-enterprise/${{ env.ARTIFACT_SUFFIX }} --recursive

          rm -rf ~/${{ env.ARTIFACT_SUFFIX }}
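The sed calls above pin the compose files to a concrete release by replacing the literal ${APP_RELEASE_VERSION} placeholder with the artifact suffix. A minimal sketch of that substitution, with a hypothetical version string:

echo 'image: makeplane/web-enterprise:${APP_RELEASE_VERSION}' \
  | sed -e 's@${APP_RELEASE_VERSION}@v1.2.3@'
# prints: image: makeplane/web-enterprise:v1.2.3
# single quotes keep the shell from expanding ${APP_RELEASE_VERSION} itself,
# and the @ delimiters avoid having to escape any / characters in the pattern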
162 .github/workflows/build-test-pull-request-ee.yml vendored Normal file
@@ -0,0 +1,162 @@
name: Build and Lint on Pull Request EE

on:
  workflow_dispatch:
  issue_comment:
    types: [created]

jobs:
  get-changed-files:
    if: github.event.issue.pull_request != '' && github.event.comment.body == 'build-test-pr'
    runs-on: ubuntu-latest
    outputs:
      apiserver_changed: ${{ steps.changed-files.outputs.apiserver_any_changed }}
      admin_changed: ${{ steps.changed-files.outputs.admin_any_changed }}
      space_changed: ${{ steps.changed-files.outputs.space_any_changed }}
      web_changed: ${{ steps.changed-files.outputs.web_any_changed }}
      monitor_changed: ${{ steps.changed-files.outputs.monitor_any_changed }}
    steps:
      - uses: actions/checkout@v4
      - name: Get changed files
        id: changed-files
        uses: tj-actions/changed-files@v44
        with:
          files_yaml: |
            apiserver:
              - apiserver/**
            admin:
              - admin/**
              - packages/**
              - 'package.json'
              - 'yarn.lock'
              - 'tsconfig.json'
              - 'turbo.json'
            space:
              - space/**
              - packages/**
              - 'package.json'
              - 'yarn.lock'
              - 'tsconfig.json'
              - 'turbo.json'
            web:
              - web/**
              - packages/**
              - 'package.json'
              - 'yarn.lock'
              - 'tsconfig.json'
              - 'turbo.json'
            monitor:
              - monitor/**

  lint-apiserver:
    needs: get-changed-files
    runs-on: ubuntu-latest
    if: needs.get-changed-files.outputs.apiserver_changed == 'true'
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.x" # Specify the Python version you need
      - name: Install Ruff
        run: python -m pip install ruff
      - name: Install Apiserver Dependencies
        run: cd apiserver && pip install -r requirements.txt
      - name: Lint apiserver
        run: ruff check --fix apiserver

  lint-admin:
    needs: get-changed-files
    if: needs.get-changed-files.outputs.admin_changed == 'true'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 18.x
      - run: yarn install
      - run: yarn lint --filter=admin

  lint-space:
    needs: get-changed-files
    if: needs.get-changed-files.outputs.space_changed == 'true'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 18.x
      - run: yarn install
      - run: yarn lint --filter=space

  lint-web:
    needs: get-changed-files
    if: needs.get-changed-files.outputs.web_changed == 'true'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 18.x
      - run: yarn install
      - run: yarn lint --filter=web

  test-monitor:
    needs: get-changed-files
    if: needs.get-changed-files.outputs.monitor_changed == 'true'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-go@v5
        with:
          go-version: "1.22.2"
      - run: cd ./monitor && make test

  build-admin:
    needs: lint-admin
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 18.x
      - run: yarn install
      - run: yarn build --filter=admin

  build-space:
    needs: lint-space
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 18.x
      - run: yarn install
      - run: yarn build --filter=space

  build-web:
    needs: lint-web
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 18.x
      - run: yarn install
      - run: yarn build --filter=web

  build-monitor:
    needs: test-monitor
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-go@v5
        with:
          go-version: "1.22.2"
      - run: cd ./monitor && make build
70 .github/workflows/create-release.yml vendored Normal file
@@ -0,0 +1,70 @@
name: Manual Release Workflow

on:
  workflow_dispatch:
    inputs:
      release_tag:
        description: 'Release Tag (e.g., v0.16-canary-1)'
        required: true
      prerelease:
        description: 'Pre-Release'
        required: true
        default: true
        type: boolean
      draft:
        description: 'Draft'
        required: true
        default: true
        type: boolean

permissions:
  contents: write

jobs:
  create-release:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0 # Necessary to fetch all history for tags

      - name: Set up Git
        run: |
          git config user.name "github-actions"
          git config user.email "github-actions@github.com"

      - name: Check for the Prerelease
        run: |
          echo ${{ github.event.release.prerelease }}

      - name: Generate Release Notes
        id: generate_notes
        run: |
          bash ./generate_release_notes.sh
          # Directly use the content of RELEASE_NOTES.md for the release body
          RELEASE_NOTES=$(cat RELEASE_NOTES.md)
          echo "RELEASE_NOTES<<EOF" >> $GITHUB_ENV
          echo "$RELEASE_NOTES" >> $GITHUB_ENV
          echo "EOF" >> $GITHUB_ENV

      - name: Create Tag
        run: |
          git tag ${{ github.event.inputs.release_tag }}
          git push origin ${{ github.event.inputs.release_tag }}

      - name: Create GitHub Release
        uses: softprops/action-gh-release@v1
        with:
          tag_name: ${{ github.event.inputs.release_tag }}
          body_path: RELEASE_NOTES.md
          draft: ${{ github.event.inputs.draft }}
          prerelease: ${{ github.event.inputs.prerelease }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
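The RELEASE_NOTES<<EOF lines in the Generate Release Notes step use GitHub Actions' documented multiline-value syntax for $GITHUB_ENV: everything between the opening NAME<<DELIMITER line and the matching DELIMITER line becomes the variable's value. A minimal sketch of the same pattern with a hypothetical NOTES variable:

# append a multiline value NOTES to the job's environment
echo "NOTES<<EOF" >> $GITHUB_ENV
echo "first line of notes" >> $GITHUB_ENV
echo "second line of notes" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV
# later steps can then read ${{ env.NOTES }} as a two-line string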
2 .gitignore vendored
@@ -85,3 +85,5 @@ deploy/selfhost/plane-app/

## Storybook
*storybook.log
output.css
monitor/prime.key
monitor/prime.key.pub
244 admin/app/authentication/oidc/form.tsx Normal file
@@ -0,0 +1,244 @@
import { FC, useState } from "react";
import Link from "next/link";
import { useForm } from "react-hook-form";
// types
import { IFormattedInstanceConfiguration, TInstanceOIDCAuthenticationConfigurationKeys } from "@plane/types";
// ui
import { Button, TOAST_TYPE, getButtonStyling, setToast } from "@plane/ui";
// components
import {
  ConfirmDiscardModal,
  ControllerInput,
  TControllerInputFormField,
  CopyField,
  TCopyField,
  CodeBlock,
} from "@/components/common";
// helpers
import { cn } from "@/helpers/common.helper";
// hooks
import { useInstance } from "@/hooks/store";

type Props = {
  config: IFormattedInstanceConfiguration;
};

type OIDCConfigFormValues = Record<TInstanceOIDCAuthenticationConfigurationKeys, string>;

export const InstanceOIDCConfigForm: FC<Props> = (props) => {
  const { config } = props;
  // states
  const [isDiscardChangesModalOpen, setIsDiscardChangesModalOpen] = useState(false);
  // store hooks
  const { updateInstanceConfigurations } = useInstance();
  // form data
  const {
    handleSubmit,
    control,
    reset,
    formState: { errors, isDirty, isSubmitting },
  } = useForm<OIDCConfigFormValues>({
    defaultValues: {
      OIDC_CLIENT_ID: config["OIDC_CLIENT_ID"],
      OIDC_CLIENT_SECRET: config["OIDC_CLIENT_SECRET"],
      OIDC_TOKEN_URL: config["OIDC_TOKEN_URL"],
      OIDC_USERINFO_URL: config["OIDC_USERINFO_URL"],
      OIDC_AUTHORIZE_URL: config["OIDC_AUTHORIZE_URL"],
      OIDC_LOGOUT_URL: config["OIDC_LOGOUT_URL"],
      OIDC_PROVIDER_NAME: config["OIDC_PROVIDER_NAME"],
    },
  });

  const originURL = typeof window !== "undefined" ? window.location.origin : "";

  const OIDC_FORM_FIELDS: TControllerInputFormField[] = [
    {
      key: "OIDC_CLIENT_ID",
      type: "text",
      label: "Client ID",
      description: "A unique ID for this Plane app that you register on your IdP",
      placeholder: "abc123xyz789",
      error: Boolean(errors.OIDC_CLIENT_ID),
      required: true,
    },
    {
      key: "OIDC_CLIENT_SECRET",
      type: "password",
      label: "Client secret",
      description: "The secret key that authenticates this Plane app to your IdP",
      placeholder: "s3cr3tK3y123!",
      error: Boolean(errors.OIDC_CLIENT_SECRET),
      required: true,
    },
    {
      key: "OIDC_AUTHORIZE_URL",
      type: "text",
      label: "Authorize URL",
      description: (
        <>
          The URL that brings up your IdP{"'"}s authentication screen when your users click the{" "}
          <CodeBlock>{"Continue with"}</CodeBlock> button
        </>
      ),
      placeholder: "https://example.com/",
      error: Boolean(errors.OIDC_AUTHORIZE_URL),
      required: true,
    },
    {
      key: "OIDC_TOKEN_URL",
      type: "text",
      label: "Token URL",
      description: "The URL that talks to the IdP and persists user authentication on Plane",
      placeholder: "https://example.com/oauth/token",
      error: Boolean(errors.OIDC_TOKEN_URL),
      required: true,
    },
    {
      key: "OIDC_USERINFO_URL",
      type: "text",
      label: "Users' info URL",
      description: "The URL that fetches your users' info from your IdP",
      placeholder: "https://example.com/userinfo",
      error: Boolean(errors.OIDC_USERINFO_URL),
      required: true,
    },
    {
      key: "OIDC_LOGOUT_URL",
      type: "text",
      label: "Logout URL",
      description: "Optional field that controls where your users go after they log out of Plane",
      placeholder: "https://example.com/logout",
      error: Boolean(errors.OIDC_LOGOUT_URL),
      required: false,
    },
    {
      key: "OIDC_PROVIDER_NAME",
      type: "text",
      label: "IdP's name",
      description: (
        <>
          Optional field for the name that your users see on the <CodeBlock>Continue with</CodeBlock> button
        </>
      ),
      placeholder: "Okta",
      error: Boolean(errors.OIDC_PROVIDER_NAME),
      required: false,
    },
  ];

  const OIDC_SERVICE_DETAILS: TCopyField[] = [
    {
      key: "Origin_URI",
      label: "Origin URI",
      url: `${originURL}/auth/oidc/`,
      description:
        "We will generate this for this Plane app. Add this as a trusted origin on your IdP's corresponding field.",
    },
    {
      key: "Callback_URI",
      label: "Callback URI",
      url: `${originURL}/auth/oidc/callback/`,
      description: (
        <>
          We will generate this for you. Add this in the{" "}
          <CodeBlock darkerShade>Sign-in redirect URI</CodeBlock> field of your IdP.
        </>
      ),
    },
    {
      key: "Logout_URI",
      label: "Logout URI",
      url: `${originURL}/auth/oidc/logout/`,
      description: (
        <>
          We will generate this for you. Add this in the{" "}
          <CodeBlock darkerShade>Logout redirect URI</CodeBlock> field of your IdP.
        </>
      ),
    },
  ];

  const onSubmit = async (formData: OIDCConfigFormValues) => {
    const payload: Partial<OIDCConfigFormValues> = { ...formData };

    await updateInstanceConfigurations(payload)
      .then((response = []) => {
        setToast({
          type: TOAST_TYPE.SUCCESS,
          title: "Done!",
          message: "Your OIDC-based authentication is configured. You should test it now.",
        });
        reset({
          OIDC_CLIENT_ID: response.find((item) => item.key === "OIDC_CLIENT_ID")?.value,
          OIDC_CLIENT_SECRET: response.find((item) => item.key === "OIDC_CLIENT_SECRET")?.value,
          OIDC_AUTHORIZE_URL: response.find((item) => item.key === "OIDC_AUTHORIZE_URL")?.value,
          OIDC_TOKEN_URL: response.find((item) => item.key === "OIDC_TOKEN_URL")?.value,
          OIDC_USERINFO_URL: response.find((item) => item.key === "OIDC_USERINFO_URL")?.value,
          OIDC_LOGOUT_URL: response.find((item) => item.key === "OIDC_LOGOUT_URL")?.value,
          OIDC_PROVIDER_NAME: response.find((item) => item.key === "OIDC_PROVIDER_NAME")?.value,
        });
      })
      .catch((err) => console.error(err));
  };

  const handleGoBack = (e: React.MouseEvent<HTMLAnchorElement, MouseEvent>) => {
    if (isDirty) {
      e.preventDefault();
      setIsDiscardChangesModalOpen(true);
    }
  };

  return (
    <>
      <ConfirmDiscardModal
        isOpen={isDiscardChangesModalOpen}
        onDiscardHref="/authentication"
        handleClose={() => setIsDiscardChangesModalOpen(false)}
      />
      <div className="flex flex-col gap-8">
        <div className="grid grid-cols-2 gap-x-12 gap-y-8 w-full">
          <div className="flex flex-col gap-y-4 col-span-2 md:col-span-1 pt-1">
            <div className="pt-2.5 text-xl font-medium">IdP-provided details for Plane</div>
            {OIDC_FORM_FIELDS.map((field) => (
              <ControllerInput
                key={field.key}
                control={control}
                type={field.type}
                name={field.key}
                label={field.label}
                description={field.description}
                placeholder={field.placeholder}
                error={field.error}
                required={field.required}
              />
            ))}
            <div className="flex flex-col gap-1 pt-4">
              <div className="flex items-center gap-4">
                <Button variant="primary" onClick={handleSubmit(onSubmit)} loading={isSubmitting} disabled={!isDirty}>
                  {isSubmitting ? "Saving..." : "Save changes"}
                </Button>
                <Link
                  href="/authentication"
                  className={cn(getButtonStyling("link-neutral", "md"), "font-medium")}
                  onClick={handleGoBack}
                >
                  Go back
                </Link>
              </div>
            </div>
          </div>
          <div className="col-span-2 md:col-span-1">
            <div className="flex flex-col gap-y-4 px-6 pt-1.5 pb-4 bg-custom-background-80/60 rounded-lg">
              <div className="pt-2 text-xl font-medium">Plane-provided details for your IdP</div>
              {OIDC_SERVICE_DETAILS.map((field) => (
                <CopyField key={field.key} label={field.label} url={field.url} description={field.description} />
              ))}
            </div>
          </div>
        </div>
      </div>
    </>
  );
};
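The form above only captures the IdP's endpoints and credentials; the sign-in itself is the standard OIDC authorization-code exchange against the configured Token URL and Users' info URL. A minimal Python sketch of that exchange follows; every value is a placeholder mirroring the form's own placeholders, not anything taken from this change:

# Hypothetical sketch of the authorization-code exchange that the
# endpoints configured in the form enable. All values are placeholders.
import requests

TOKEN_URL = "https://example.com/oauth/token"    # OIDC_TOKEN_URL
USERINFO_URL = "https://example.com/userinfo"    # OIDC_USERINFO_URL
CLIENT_ID = "abc123xyz789"                       # OIDC_CLIENT_ID
CLIENT_SECRET = "s3cr3tK3y123!"                  # OIDC_CLIENT_SECRET
CALLBACK_URI = "https://plane.example.com/auth/oidc/callback/"


def exchange_code_for_userinfo(code: str) -> dict:
    # 1) Trade the one-time authorization code for an access token.
    token_response = requests.post(
        TOKEN_URL,
        data={
            "grant_type": "authorization_code",
            "code": code,
            "redirect_uri": CALLBACK_URI,
            "client_id": CLIENT_ID,
            "client_secret": CLIENT_SECRET,
        },
        timeout=10,
    )
    access_token = token_response.json()["access_token"]
    # 2) Fetch the signed-in user's profile from the userinfo endpoint.
    userinfo = requests.get(
        USERINFO_URL,
        headers={"Authorization": f"Bearer {access_token}"},
        timeout=10,
    )
    return userinfo.json()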
102
admin/app/authentication/oidc/page.tsx
Normal file
@@ -0,0 +1,102 @@
"use client";

import { useState } from "react";
import Image from "next/image";
import { observer } from "mobx-react-lite";
import useSWR from "swr";
// hooks
import { useInstance } from "@/hooks/store";
// ui
import { Loader, ToggleSwitch, setPromiseToast } from "@plane/ui";
// components
import { PageHeader } from "@/components/common";
import { AuthenticationMethodCard } from "@/components/authentication";
import { InstanceOIDCConfigForm } from "./form";
// icons
import OIDCLogo from "/public/logos/oidc-logo.svg";

const InstanceOIDCAuthenticationPage = observer(() => {
  // store
  const { fetchInstanceConfigurations, formattedConfig, updateInstanceConfigurations } = useInstance();
  // state
  const [isSubmitting, setIsSubmitting] = useState<boolean>(false);
  // config
  const enableOIDCConfig = formattedConfig?.IS_OIDC_ENABLED ?? "";

  useSWR("INSTANCE_CONFIGURATIONS", () => fetchInstanceConfigurations());

  const updateConfig = async (key: "IS_OIDC_ENABLED", value: string) => {
    setIsSubmitting(true);

    const payload = {
      [key]: value,
    };

    const updateConfigPromise = updateInstanceConfigurations(payload);

    setPromiseToast(updateConfigPromise, {
      loading: "Saving Configuration...",
      success: {
        title: "Configuration saved",
        message: () => `OIDC authentication is now ${parseInt(value) ? "active" : "disabled"}.`,
      },
      error: {
        title: "Error",
        message: () => "Failed to save configuration",
      },
    });

    await updateConfigPromise
      .then(() => {
        setIsSubmitting(false);
      })
      .catch((err) => {
        console.error(err);
        setIsSubmitting(false);
      });
  };

  return (
    <>
      <PageHeader title="Authentication - God Mode" />
      <div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
        <div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
          <AuthenticationMethodCard
            name="OIDC"
            description="Authenticate your users via the OpenID Connect protocol."
            icon={<Image src={OIDCLogo} height={24} width={24} alt="OIDC Logo" />}
            config={
              <ToggleSwitch
                value={Boolean(parseInt(enableOIDCConfig))}
                onChange={() => {
                  Boolean(parseInt(enableOIDCConfig)) === true
                    ? updateConfig("IS_OIDC_ENABLED", "0")
                    : updateConfig("IS_OIDC_ENABLED", "1");
                }}
                size="sm"
                disabled={isSubmitting || !formattedConfig}
              />
            }
            disabled={isSubmitting || !formattedConfig}
            withBorder={false}
          />
        </div>
        <div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
          {formattedConfig ? (
            <InstanceOIDCConfigForm config={formattedConfig} />
          ) : (
            <Loader className="space-y-8">
              <Loader.Item height="50px" width="25%" />
              <Loader.Item height="50px" />
              <Loader.Item height="50px" />
              <Loader.Item height="50px" />
              <Loader.Item height="50px" />
              <Loader.Item height="50px" width="50%" />
            </Loader>
          )}
        </div>
      </div>
    </>
  );
});

export default InstanceOIDCAuthenticationPage;
245
admin/app/authentication/saml/form.tsx
Normal file
@@ -0,0 +1,245 @@
import { FC, useState } from "react";
import Link from "next/link";
import { Controller, useForm } from "react-hook-form";
// types
import { IFormattedInstanceConfiguration, TInstanceSAMLAuthenticationConfigurationKeys } from "@plane/types";
// ui
import { Button, TOAST_TYPE, TextArea, getButtonStyling, setToast } from "@plane/ui";
// components
import {
  ConfirmDiscardModal,
  ControllerInput,
  TControllerInputFormField,
  CopyField,
  TCopyField,
  CodeBlock,
} from "@/components/common";
// helpers
import { cn } from "@/helpers/common.helper";
// hooks
import { useInstance } from "@/hooks/store";
import { SAMLAttributeMappingTable } from "@/plane-admin/components/authentication";

type Props = {
  config: IFormattedInstanceConfiguration;
};

type SAMLConfigFormValues = Record<TInstanceSAMLAuthenticationConfigurationKeys, string>;

export const InstanceSAMLConfigForm: FC<Props> = (props) => {
  const { config } = props;
  // states
  const [isDiscardChangesModalOpen, setIsDiscardChangesModalOpen] = useState(false);
  // store hooks
  const { updateInstanceConfigurations } = useInstance();
  // form data
  const {
    handleSubmit,
    control,
    reset,
    formState: { errors, isDirty, isSubmitting },
  } = useForm<SAMLConfigFormValues>({
    defaultValues: {
      SAML_ENTITY_ID: config["SAML_ENTITY_ID"],
      SAML_SSO_URL: config["SAML_SSO_URL"],
      SAML_LOGOUT_URL: config["SAML_LOGOUT_URL"],
      SAML_CERTIFICATE: config["SAML_CERTIFICATE"],
      SAML_PROVIDER_NAME: config["SAML_PROVIDER_NAME"],
    },
  });

  const originURL = typeof window !== "undefined" ? window.location.origin : "";

  const SAML_FORM_FIELDS: TControllerInputFormField[] = [
    {
      key: "SAML_ENTITY_ID",
      type: "text",
      label: "Entity ID",
      description: "A unique ID for this Plane app that you register on your IdP",
      placeholder: "70a44354520df8bd9bcd",
      error: Boolean(errors.SAML_ENTITY_ID),
      required: true,
    },
    {
      key: "SAML_SSO_URL",
      type: "text",
      label: "SSO URL",
      description: (
        <>
          The URL that brings up your IdP{"'"}s authentication screen when your users click the{" "}
          <CodeBlock>{"Continue with"}</CodeBlock> button
        </>
      ),
      placeholder: "https://example.com/sso",
      error: Boolean(errors.SAML_SSO_URL),
      required: true,
    },
    {
      key: "SAML_LOGOUT_URL",
      type: "text",
      label: "Logout URL",
      description: "Optional field that tells your IdP your users have logged out of this Plane app",
      placeholder: "https://example.com/logout",
      error: Boolean(errors.SAML_LOGOUT_URL),
      required: false,
    },
    {
      key: "SAML_PROVIDER_NAME",
      type: "text",
      label: "IdP's name",
      description: (
        <>
          Optional field for the name that your users see on the <CodeBlock>Continue with</CodeBlock> button
        </>
      ),
      placeholder: "Okta",
      error: Boolean(errors.SAML_PROVIDER_NAME),
      required: false,
    },
  ];

  const SAML_SERVICE_DETAILS: TCopyField[] = [
    {
      key: "Metadata_Information",
      label: "Entity ID | Audience | Metadata information",
      url: `${originURL}/auth/saml/metadata/`,
      description:
        "We will generate this bit of the metadata that identifies this Plane app as an authorized service on your IdP.",
    },
    {
      key: "Callback_URI",
      label: "Callback URI",
      url: `${originURL}/auth/saml/callback/`,
      description: (
        <>
          We will generate this <CodeBlock darkerShade>http-post request</CodeBlock> URL that you should paste into
          your <CodeBlock darkerShade>ACS URL</CodeBlock> or <CodeBlock darkerShade>Sign-in call back URL</CodeBlock>{" "}
          field on your IdP.
        </>
      ),
    },
    {
      key: "Logout_URI",
      label: "Logout URI",
      url: `${originURL}/auth/saml/logout/`,
      description: (
        <>
          We will generate this <CodeBlock darkerShade>http-redirect request</CodeBlock> URL that you should paste
          into your <CodeBlock darkerShade>SLS URL</CodeBlock> or <CodeBlock darkerShade>Logout URL</CodeBlock> field
          on your IdP.
        </>
      ),
    },
  ];

  const onSubmit = async (formData: SAMLConfigFormValues) => {
    const payload: Partial<SAMLConfigFormValues> = { ...formData };

    await updateInstanceConfigurations(payload)
      .then((response = []) => {
        setToast({
          type: TOAST_TYPE.SUCCESS,
          title: "Done!",
          message: "Your SAML-based authentication is configured. You should test it now.",
        });
        reset({
          SAML_ENTITY_ID: response.find((item) => item.key === "SAML_ENTITY_ID")?.value,
          SAML_SSO_URL: response.find((item) => item.key === "SAML_SSO_URL")?.value,
          SAML_LOGOUT_URL: response.find((item) => item.key === "SAML_LOGOUT_URL")?.value,
          SAML_CERTIFICATE: response.find((item) => item.key === "SAML_CERTIFICATE")?.value,
          SAML_PROVIDER_NAME: response.find((item) => item.key === "SAML_PROVIDER_NAME")?.value,
        });
      })
      .catch((err) => console.error(err));
  };

  const handleGoBack = (e: React.MouseEvent<HTMLAnchorElement, MouseEvent>) => {
    if (isDirty) {
      e.preventDefault();
      setIsDiscardChangesModalOpen(true);
    }
  };

  return (
    <>
      <ConfirmDiscardModal
        isOpen={isDiscardChangesModalOpen}
        onDiscardHref="/authentication"
        handleClose={() => setIsDiscardChangesModalOpen(false)}
      />
      <div className="flex flex-col gap-8">
        <div className="grid grid-cols-2 gap-x-12 gap-y-8 w-full">
          <div className="flex flex-col gap-y-4 col-span-2 md:col-span-1 pt-1">
            <div className="pt-2.5 text-xl font-medium">IdP-provided details for Plane</div>
            {SAML_FORM_FIELDS.map((field) => (
              <ControllerInput
                key={field.key}
                control={control}
                type={field.type}
                name={field.key}
                label={field.label}
                description={field.description}
                placeholder={field.placeholder}
                error={field.error}
                required={field.required}
              />
            ))}
            <div className="flex flex-col gap-1">
              <h4 className="text-sm">SAML certificate</h4>
              <Controller
                control={control}
                name="SAML_CERTIFICATE"
                rules={{ required: "Certificate is required." }}
                render={({ field: { value, onChange } }) => (
                  <TextArea
                    id="SAML_CERTIFICATE"
                    name="SAML_CERTIFICATE"
                    value={value}
                    onChange={onChange}
                    hasError={Boolean(errors.SAML_CERTIFICATE)}
                    placeholder="---BEGIN CERTIFICATE---\n2yWn1gc7DhOFB9\nr0gbE+\n---END CERTIFICATE---"
                    className="min-h-[102px] w-full rounded-md font-medium text-sm"
                  />
                )}
              />
              <p className="pt-0.5 text-xs text-custom-text-300">
                IdP-generated certificate for signing this Plane app as an authorized service provider for your IdP
              </p>
            </div>
            <div className="flex flex-col gap-1 pt-4">
              <div className="flex items-center gap-4">
                <Button variant="primary" onClick={handleSubmit(onSubmit)} loading={isSubmitting} disabled={!isDirty}>
                  {isSubmitting ? "Saving..." : "Save changes"}
                </Button>
                <Link
                  href="/authentication"
                  className={cn(getButtonStyling("link-neutral", "md"), "font-medium")}
                  onClick={handleGoBack}
                >
                  Go back
                </Link>
              </div>
            </div>
          </div>
          <div className="col-span-2 md:col-span-1">
            <div className="flex flex-col gap-y-4 px-6 pt-1.5 pb-4 bg-custom-background-80/60 rounded-lg">
              <div className="pt-2 text-xl font-medium">Plane-provided details for your IdP</div>
              {SAML_SERVICE_DETAILS.map((field) => (
                <CopyField key={field.key} label={field.label} url={field.url} description={field.description} />
              ))}
              <div className="flex flex-col gap-1">
                <h4 className="text-sm text-custom-text-200 font-medium">Mapping</h4>
                <SAMLAttributeMappingTable />
              </div>
            </div>
          </div>
        </div>
      </div>
    </>
  );
};
102
admin/app/authentication/saml/page.tsx
Normal file
@@ -0,0 +1,102 @@
"use client";

import { useState } from "react";
import { observer } from "mobx-react-lite";
import Image from "next/image";
import useSWR from "swr";
// ui
import { Loader, ToggleSwitch, setPromiseToast } from "@plane/ui";
// components
import { PageHeader } from "@/components/common";
import { AuthenticationMethodCard } from "@/components/authentication";
import { InstanceSAMLConfigForm } from "./form";
// hooks
import { useInstance } from "@/hooks/store";
// icons
import SAMLLogo from "/public/logos/saml-logo.svg";

const InstanceSAMLAuthenticationPage = observer(() => {
  // store
  const { fetchInstanceConfigurations, formattedConfig, updateInstanceConfigurations } = useInstance();
  // state
  const [isSubmitting, setIsSubmitting] = useState<boolean>(false);
  // config
  const enableSAMLConfig = formattedConfig?.IS_SAML_ENABLED ?? "";

  useSWR("INSTANCE_CONFIGURATIONS", () => fetchInstanceConfigurations());

  const updateConfig = async (key: "IS_SAML_ENABLED", value: string) => {
    setIsSubmitting(true);

    const payload = {
      [key]: value,
    };

    const updateConfigPromise = updateInstanceConfigurations(payload);

    setPromiseToast(updateConfigPromise, {
      loading: "Saving Configuration...",
      success: {
        title: "Configuration saved",
        message: () => `SAML authentication is now ${parseInt(value) ? "active" : "disabled"}.`,
      },
      error: {
        title: "Error",
        message: () => "Failed to save configuration",
      },
    });

    await updateConfigPromise
      .then(() => {
        setIsSubmitting(false);
      })
      .catch((err) => {
        console.error(err);
        setIsSubmitting(false);
      });
  };

  return (
    <>
      <PageHeader title="Authentication - God Mode" />
      <div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
        <div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
          <AuthenticationMethodCard
            name="SAML"
            description="Authenticate your users via the Security Assertion Markup Language protocol."
            icon={<Image src={SAMLLogo} height={24} width={24} alt="SAML Logo" className="pl-0.5" />}
            config={
              <ToggleSwitch
                value={Boolean(parseInt(enableSAMLConfig))}
                onChange={() => {
                  Boolean(parseInt(enableSAMLConfig)) === true
                    ? updateConfig("IS_SAML_ENABLED", "0")
                    : updateConfig("IS_SAML_ENABLED", "1");
                }}
                size="sm"
                disabled={isSubmitting || !formattedConfig}
              />
            }
            disabled={isSubmitting || !formattedConfig}
            withBorder={false}
          />
        </div>
        <div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
          {formattedConfig ? (
            <InstanceSAMLConfigForm config={formattedConfig} />
          ) : (
            <Loader className="space-y-8">
              <Loader.Item height="50px" width="25%" />
              <Loader.Item height="50px" />
              <Loader.Item height="50px" />
              <Loader.Item height="50px" />
              <Loader.Item height="50px" width="50%" />
            </Loader>
          )}
        </div>
      </div>
    </>
  );
});

export default InstanceSAMLAuthenticationPage;
@@ -1 +1,76 @@
export * from "ce/components/authentication/authentication-modes";
import { observer } from "mobx-react";
import Image from "next/image";
import { useTheme } from "next-themes";
// types
import {
  TInstanceAuthenticationMethodKeys as TBaseAuthenticationMethods,
  TInstanceAuthenticationModes,
  TInstanceEnterpriseAuthenticationMethodKeys,
} from "@plane/types";
// components
import { AuthenticationMethodCard } from "@/components/authentication";
// helpers
import { getBaseAuthenticationModes } from "@/helpers/authentication.helper";
// plane admin components
import { OIDCConfiguration, SAMLConfiguration } from "@/plane-admin/components/authentication";
// images
import OIDCLogo from "@/public/logos/oidc-logo.svg";
import SAMLLogo from "@/public/logos/saml-logo.svg";

type TInstanceAuthenticationMethodKeys = TBaseAuthenticationMethods | TInstanceEnterpriseAuthenticationMethodKeys;

export type TAuthenticationModeProps = {
  disabled: boolean;
  updateConfig: (key: TInstanceAuthenticationMethodKeys, value: string) => void;
};

export type TGetAuthenticationModeProps = {
  disabled: boolean;
  updateConfig: (key: TInstanceAuthenticationMethodKeys, value: string) => void;
  resolvedTheme: string | undefined;
};

// Enterprise authentication methods
export const getAuthenticationModes: (props: TGetAuthenticationModeProps) => TInstanceAuthenticationModes[] = ({
  disabled,
  updateConfig,
  resolvedTheme,
}) => [
  ...getBaseAuthenticationModes({ disabled, updateConfig, resolvedTheme }),
  {
    key: "oidc",
    name: "OIDC",
    description: "Authenticate your users via the OpenID Connect protocol.",
    icon: <Image src={OIDCLogo} height={22} width={22} alt="OIDC Logo" />,
    config: <OIDCConfiguration disabled={disabled} updateConfig={updateConfig} />,
  },
  {
    key: "saml",
    name: "SAML",
    description: "Authenticate your users via the Security Assertion Markup Language protocol.",
    icon: <Image src={SAMLLogo} height={22} width={22} alt="SAML Logo" className="pl-0.5" />,
    config: <SAMLConfiguration disabled={disabled} updateConfig={updateConfig} />,
  },
];

export const AuthenticationModes: React.FC<TAuthenticationModeProps> = observer((props) => {
  const { disabled, updateConfig } = props;
  // next-themes
  const { resolvedTheme } = useTheme();

  return (
    <>
      {getAuthenticationModes({ disabled, updateConfig, resolvedTheme }).map((method) => (
        <AuthenticationMethodCard
          key={method.key}
          name={method.name}
          description={method.description}
          icon={method.icon}
          config={method.config}
          disabled={disabled}
          unavailable={method.unavailable}
        />
      ))}
    </>
  );
});
@@ -1 +1,4 @@
export * from "./authentication-modes";
export * from "./oidc-config";
export * from "./saml-config";
export * from "./saml-attribute-mapping-table";
72
admin/ee/components/authentication/oidc-config.tsx
Normal file
@@ -0,0 +1,72 @@
"use client";

import React from "react";
import { observer } from "mobx-react-lite";
import Link from "next/link";
// icons
import { Settings2 } from "lucide-react";
// types
import { TInstanceEnterpriseAuthenticationMethodKeys } from "@plane/types";
// ui
import { ToggleSwitch, getButtonStyling } from "@plane/ui";
// helpers
import { cn } from "@/helpers/common.helper";
// hooks
import { useInstance } from "@/hooks/store";

type Props = {
  disabled: boolean;
  updateConfig: (key: TInstanceEnterpriseAuthenticationMethodKeys, value: string) => void;
};

export const OIDCConfiguration: React.FC<Props> = observer((props) => {
  const { disabled, updateConfig } = props;
  // store
  const { formattedConfig } = useInstance();
  // derived values
  const enableOIDCConfig = formattedConfig?.IS_OIDC_ENABLED ?? "";
  const isOIDCConfigured = !!formattedConfig?.OIDC_CLIENT_ID && !!formattedConfig?.OIDC_CLIENT_SECRET;

  return (
    <>
      {isOIDCConfigured ? (
        <div className="flex items-center gap-4">
          <Link href="/authentication/oidc" className={cn(getButtonStyling("link-primary", "md"), "font-medium")}>
            Edit
          </Link>
          <ToggleSwitch
            value={Boolean(parseInt(enableOIDCConfig))}
            onChange={() => {
              Boolean(parseInt(enableOIDCConfig)) === true
                ? updateConfig("IS_OIDC_ENABLED", "0")
                : updateConfig("IS_OIDC_ENABLED", "1");
            }}
            size="sm"
            disabled={disabled}
          />
        </div>
      ) : (
        <Link
          href="/authentication/oidc"
          className={cn(getButtonStyling("neutral-primary", "sm"), "text-custom-text-300")}
        >
          <Settings2 className="h-4 w-4 p-0.5 text-custom-text-300/80" />
          Configure
        </Link>
      )}
    </>
  );
});
@@ -0,0 +1,28 @@
export const SAMLAttributeMappingTable = () => (
  <table className="table-auto border-collapse text-custom-text-200 text-sm">
    <thead>
      <tr className="text-left">
        <th className="border-b border-r border-custom-border-300 px-4 py-1.5">IdP</th>
        <th className="border-b border-custom-border-300 px-4 py-1.5">Plane</th>
      </tr>
    </thead>
    <tbody>
      <tr>
        <td className="border-t border-r border-custom-border-300 px-4 py-1.5">Name ID format</td>
        <td className="border-t border-custom-border-300 px-4 py-1.5">emailAddress</td>
      </tr>
      <tr>
        <td className="border-t border-r border-custom-border-300 px-4 py-1.5">first_name</td>
        <td className="border-t border-custom-border-300 px-4 py-1.5">user.firstName</td>
      </tr>
      <tr>
        <td className="border-t border-r border-custom-border-300 px-4 py-1.5">last_name</td>
        <td className="border-t border-custom-border-300 px-4 py-1.5">user.lastName</td>
      </tr>
      <tr>
        <td className="border-t border-r border-custom-border-300 px-4 py-1.5">email</td>
        <td className="border-t border-custom-border-300 px-4 py-1.5">user.email</td>
      </tr>
    </tbody>
  </table>
);
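The table is the contract a backend applies once a SAML assertion has been parsed. A minimal Python sketch of that mapping step; the function and the already-parsed attributes dict are illustrative, not part of this change:

# Hypothetical mapping step: turn parsed IdP assertion attributes into
# the Plane user fields listed in the table above.
SAML_ATTRIBUTE_MAP = {
    "first_name": "user.firstName",
    "last_name": "user.lastName",
    "email": "user.email",
}


def map_assertion_attributes(attributes: dict) -> dict:
    # `attributes` is assumed to be {IdP attribute name: value}, with the
    # NameID already validated in emailAddress format.
    return {
        plane_field: attributes.get(idp_attr)
        for idp_attr, plane_field in SAML_ATTRIBUTE_MAP.items()
    }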
72
admin/ee/components/authentication/saml-config.tsx
Normal file
@@ -0,0 +1,72 @@
"use client";

import React from "react";
import { observer } from "mobx-react-lite";
import Link from "next/link";
// icons
import { Settings2 } from "lucide-react";
// types
import { TInstanceEnterpriseAuthenticationMethodKeys } from "@plane/types";
// ui
import { ToggleSwitch, getButtonStyling } from "@plane/ui";
// helpers
import { cn } from "@/helpers/common.helper";
// hooks
import { useInstance } from "@/hooks/store";

type Props = {
  disabled: boolean;
  updateConfig: (key: TInstanceEnterpriseAuthenticationMethodKeys, value: string) => void;
};

export const SAMLConfiguration: React.FC<Props> = observer((props) => {
  const { disabled, updateConfig } = props;
  // store
  const { formattedConfig } = useInstance();
  // derived values
  const enableSAMLConfig = formattedConfig?.IS_SAML_ENABLED ?? "";
  const isSAMLConfigured = !!formattedConfig?.SAML_ENTITY_ID && !!formattedConfig?.SAML_CERTIFICATE;

  return (
    <>
      {isSAMLConfigured ? (
        <div className="flex items-center gap-4">
          <Link href="/authentication/saml" className={cn(getButtonStyling("link-primary", "md"), "font-medium")}>
            Edit
          </Link>
          <ToggleSwitch
            value={Boolean(parseInt(enableSAMLConfig))}
            onChange={() => {
              Boolean(parseInt(enableSAMLConfig)) === true
                ? updateConfig("IS_SAML_ENABLED", "0")
                : updateConfig("IS_SAML_ENABLED", "1");
            }}
            size="sm"
            disabled={disabled}
          />
        </div>
      ) : (
        <Link
          href="/authentication/saml"
          className={cn(getButtonStyling("neutral-primary", "sm"), "text-custom-text-300")}
        >
          <Settings2 className="h-4 w-4 p-0.5 text-custom-text-300/80" />
          Configure
        </Link>
      )}
    </>
  );
});
@@ -10,7 +10,7 @@
      "@/*": ["core/*"],
      "@/helpers/*": ["helpers/*"],
      "@/public/*": ["public/*"],
      "@/plane-admin/*": ["ce/*"]
      "@/plane-admin/*": ["ee/*"]
    },
    "plugins": [
      {
@@ -50,6 +50,7 @@ GUNICORN_WORKERS=2
ADMIN_BASE_URL=
SPACE_BASE_URL=
APP_BASE_URL=
SILO_BASE_URL=

# Hard delete files after days
HARD_DELETE_AFTER_DAYS=60
@@ -20,7 +20,8 @@ RUN apk --no-cache add \
    "make" \
    "postgresql-dev" \
    "libc-dev" \
    "linux-headers"
    "linux-headers" \
    "xmlsec-dev"

WORKDIR /code
18
apiserver/bin/docker-entrypoint-api-cloud.sh
Executable file
@@ -0,0 +1,18 @@
#!/bin/bash
set -e

export SKIP_ENV_VAR=0

python manage.py wait_for_db
# Wait for migrations
python manage.py wait_for_migrations

# Clear Cache before starting to remove stale values
python manage.py clear_cache

# Register instance if INSTANCE_ADMIN_EMAIL is set
if [ -n "$INSTANCE_ADMIN_EMAIL" ]; then
    python manage.py setup_instance "$INSTANCE_ADMIN_EMAIL"
fi

exec gunicorn -w "$GUNICORN_WORKERS" -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:"${PORT:-8000}" --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
39
apiserver/bin/docker-entrypoint-api-ee.sh
Executable file
@@ -0,0 +1,39 @@
#!/bin/bash
set -e

python manage.py wait_for_db
# Wait for migrations
python manage.py wait_for_migrations

# Collect system information
HOSTNAME=$(hostname)
MAC_ADDRESS=$(ip link show | awk '/ether/ {print $2}' | head -n 1)
CPU_INFO=$(cat /proc/cpuinfo)
MEMORY_INFO=$(free -h)
DISK_INFO=$(df -h)

# Concatenate information and compute SHA-256 hash
SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256sum | awk '{print $1}')

# Export the variables
MACHINE_SIGNATURE=${MACHINE_SIGNATURE:-$SIGNATURE}
export SKIP_ENV_VAR=1

# License check
python manage.py license_check

# Register instance
python manage.py register_instance_ee "$MACHINE_SIGNATURE"

# Load the configuration variable
python manage.py configure_instance

# Create the default bucket
python manage.py create_bucket

# Clear Cache before starting to remove stale values
python manage.py clear_cache

exec gunicorn -w "$GUNICORN_WORKERS" -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:"${PORT:-8000}" --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
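The entrypoint fingerprints the host by hashing its hostname, first MAC address, CPU, memory, and disk details. A minimal Python sketch of the same derivation, useful for checking what the script computes; it mirrors the shell pipeline above (including echo's trailing newline), though byte-for-byte parity assumes identical command output:

# Sketch: reproduce the machine-signature derivation from the entrypoint.
import hashlib
import subprocess


def run(cmd):
    # Capture a command's stdout like $(...), stripping trailing newlines.
    return subprocess.run(
        cmd, capture_output=True, text=True, check=True
    ).stdout.rstrip("\n")


hostname = run(["hostname"])
mac_address = next(
    (line.split()[1] for line in run(["ip", "link", "show"]).splitlines() if "ether" in line),
    "",
)
cpu_info = open("/proc/cpuinfo").read().rstrip("\n")
memory_info = run(["free", "-h"])
disk_info = run(["df", "-h"])

# echo appends one newline before sha256sum reads the concatenation.
payload = f"{hostname}{mac_address}{cpu_info}{memory_info}{disk_info}\n"
print(hashlib.sha256(payload.encode()).hexdigest())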
@@ -32,4 +32,4 @@ python manage.py create_bucket
# Clear Cache before starting to remove stale values
python manage.py clear_cache

exec gunicorn -w "$GUNICORN_WORKERS" -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:"${PORT:-8000}" --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
exec gunicorn -w "$GUNICORN_WORKERS" -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:"${PORT:-8000}" --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
@@ -19,6 +19,7 @@ from plane.app.permissions import ProjectLitePermission
from plane.bgtasks.issue_activities_task import issue_activity
from plane.db.models import (
    Inbox,
    IssueType,
    InboxIssue,
    Issue,
    Project,
@@ -145,6 +146,11 @@ class InboxIssueAPIEndpoint(BaseAPIView):
            is_triage=True,
        )

        # Get the issue type
        issue_type = IssueType.objects.filter(
            project_id=project_id, is_default=True
        ).first()

        # create an issue
        issue = Issue.objects.create(
            name=request.data.get("issue", {}).get("name"),
@@ -155,6 +161,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):
            priority=request.data.get("issue", {}).get("priority", "none"),
            project_id=project_id,
            state=state,
            type=issue_type,
        )

        # create an inbox issue
8
apiserver/plane/app/authentication/session.py
Normal file
@@ -0,0 +1,8 @@
from rest_framework.authentication import SessionAuthentication


class BaseSessionAuthentication(SessionAuthentication):

    # Disable csrf for the rest apis
    def enforce_csrf(self, request):
        return
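Overriding `enforce_csrf` with a no-op keeps Django's session cookie flow but skips DRF's CSRF check for session-authenticated API requests. A minimal sketch of wiring such a class into DRF settings; the settings fragment is illustrative, with the dotted path derived from this file's location:

# Hypothetical Django settings fragment: use the CSRF-exempt session
# authentication class as DRF's default authenticator.
REST_FRAMEWORK = {
    "DEFAULT_AUTHENTICATION_CLASSES": [
        "plane.app.authentication.session.BaseSessionAuthentication",
    ],
}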
@@ -30,7 +30,6 @@ from .project import (
    ProjectIdentifierSerializer,
    ProjectLiteSerializer,
    ProjectMemberLiteSerializer,
    DeployBoardSerializer,
    ProjectMemberAdminSerializer,
    ProjectPublicMemberSerializer,
    ProjectMemberRoleSerializer,
@@ -39,6 +38,7 @@ from .state import StateSerializer, StateLiteSerializer
from .view import (
    IssueViewSerializer,
)

from .cycle import (
    CycleSerializer,
    CycleIssueSerializer,
@@ -124,3 +124,15 @@ from .webhook import WebhookSerializer, WebhookLogSerializer
from .dashboard import DashboardSerializer, WidgetSerializer

from .favorite import UserFavoriteSerializer

from .integration import (
    IntegrationSerializer,
    WorkspaceIntegrationSerializer,
    GithubIssueSyncSerializer,
    GithubRepositorySerializer,
    GithubRepositorySyncSerializer,
    GithubCommentSyncSerializer,
    SlackProjectSyncSerializer,
)

from .deploy_board import DeployBoardSerializer
@@ -3,6 +3,7 @@ from rest_framework import serializers

# Module imports
from .base import BaseSerializer

from .issue import IssueStateSerializer
from plane.db.models import (
    Cycle,
21
apiserver/plane/app/serializers/deploy_board.py
Normal file
@@ -0,0 +1,21 @@
# Module imports
from .base import BaseSerializer
from plane.app.serializers.project import ProjectLiteSerializer
from plane.app.serializers.workspace import WorkspaceLiteSerializer
from plane.db.models import DeployBoard


class DeployBoardSerializer(BaseSerializer):
    project_details = ProjectLiteSerializer(read_only=True, source="project")
    workspace_detail = WorkspaceLiteSerializer(
        read_only=True, source="workspace"
    )

    class Meta:
        model = DeployBoard
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "anchor",
        ]
8
apiserver/plane/app/serializers/integration/__init__.py
Normal file
@@ -0,0 +1,8 @@
from .base import IntegrationSerializer, WorkspaceIntegrationSerializer
from .github import (
    GithubRepositorySerializer,
    GithubRepositorySyncSerializer,
    GithubIssueSyncSerializer,
    GithubCommentSyncSerializer,
)
from .slack import SlackProjectSyncSerializer
22
apiserver/plane/app/serializers/integration/base.py
Normal file
@@ -0,0 +1,22 @@
# Module imports
from plane.app.serializers import BaseSerializer
from plane.db.models import Integration, WorkspaceIntegration


class IntegrationSerializer(BaseSerializer):
    class Meta:
        model = Integration
        fields = "__all__"
        read_only_fields = [
            "verified",
        ]


class WorkspaceIntegrationSerializer(BaseSerializer):
    integration_detail = IntegrationSerializer(
        read_only=True, source="integration"
    )

    class Meta:
        model = WorkspaceIntegration
        fields = "__all__"
45
apiserver/plane/app/serializers/integration/github.py
Normal file
@@ -0,0 +1,45 @@
# Module imports
from plane.app.serializers import BaseSerializer
from plane.db.models import (
    GithubIssueSync,
    GithubRepository,
    GithubRepositorySync,
    GithubCommentSync,
)


class GithubRepositorySerializer(BaseSerializer):
    class Meta:
        model = GithubRepository
        fields = "__all__"


class GithubRepositorySyncSerializer(BaseSerializer):
    repo_detail = GithubRepositorySerializer(source="repository")

    class Meta:
        model = GithubRepositorySync
        fields = "__all__"


class GithubIssueSyncSerializer(BaseSerializer):
    class Meta:
        model = GithubIssueSync
        fields = "__all__"
        read_only_fields = [
            "project",
            "workspace",
            "repository_sync",
        ]


class GithubCommentSyncSerializer(BaseSerializer):
    class Meta:
        model = GithubCommentSync
        fields = "__all__"
        read_only_fields = [
            "project",
            "workspace",
            "repository_sync",
            "issue_sync",
        ]
14
apiserver/plane/app/serializers/integration/slack.py
Normal file
@@ -0,0 +1,14 @@
# Module imports
from plane.app.serializers import BaseSerializer
from plane.db.models import SlackProjectSync


class SlackProjectSyncSerializer(BaseSerializer):
    class Meta:
        model = SlackProjectSync
        fields = "__all__"
        read_only_fields = [
            "project",
            "workspace",
            "workspace_integration",
        ]
@@ -33,6 +33,7 @@ from plane.db.models import (
    IssueVote,
    IssueRelation,
    State,
    IssueType,
)


@@ -52,6 +53,7 @@ class IssueFlatSerializer(BaseSerializer):
            "sequence_id",
            "sort_order",
            "is_draft",
            "type_id",
        ]


@@ -79,6 +81,12 @@ class IssueCreateSerializer(BaseSerializer):
        required=False,
        allow_null=True,
    )
    type_id = serializers.PrimaryKeyRelatedField(
        source="type",
        queryset=IssueType.objects.all(),
        required=False,
        allow_null=True,
    )
    parent_id = serializers.PrimaryKeyRelatedField(
        source="parent",
        queryset=Issue.objects.all(),
@@ -135,10 +143,20 @@ class IssueCreateSerializer(BaseSerializer):
        workspace_id = self.context["workspace_id"]
        default_assignee_id = self.context["default_assignee_id"]

        issue_type = validated_data.pop("type", None)

        if not issue_type:
            # Get default issue type
            issue_type = IssueType.objects.filter(
                project_issue_types__project_id=project_id, is_default=True
            ).first()

        # Create Issue
        issue = Issue.objects.create(
            **validated_data,
            project_id=project_id,
            type=issue_type,
        )

        # Issue Audit Users
@@ -701,6 +719,7 @@ class IssueSerializer(DynamicBaseSerializer):
            "link_count",
            "is_draft",
            "archived_at",
            "type_id",
        ]
        read_only_fields = fields

@@ -712,6 +731,7 @@ class IssueLiteSerializer(DynamicBaseSerializer):
            "id",
            "sequence_id",
            "project_id",
            "type_id",
        ]
        read_only_fields = fields
@@ -30,6 +30,7 @@ class PageSerializer(BaseSerializer):
        child=serializers.UUIDField(),
        required=False,
    )
    anchor = serializers.CharField(read_only=True)

    class Meta:
        model = Page
@@ -53,10 +54,12 @@ class PageSerializer(BaseSerializer):
            "logo_props",
            "label_ids",
            "project_ids",
            "anchor",
        ]
        read_only_fields = [
            "workspace",
            "owned_by",
            "anchor",
        ]

    def create(self, validated_data):
@@ -125,6 +128,7 @@ class PageSerializer(BaseSerializer):

class PageDetailSerializer(PageSerializer):
    description_html = serializers.CharField()
    is_favorite = serializers.BooleanField(read_only=True)

    class Meta(PageSerializer.Meta):
        fields = PageSerializer.Meta.fields + [
@@ -13,7 +13,6 @@ from plane.db.models import (
    ProjectMember,
    ProjectMemberInvite,
    ProjectIdentifier,
    DeployBoard,
    ProjectPublicMember,
)

@@ -117,6 +116,12 @@ class ProjectListSerializer(DynamicBaseSerializer):
    member_role = serializers.IntegerField(read_only=True)
    anchor = serializers.CharField(read_only=True)
    members = serializers.SerializerMethodField()
    # EE: project_grouping starts
    state_id = serializers.UUIDField(read_only=True)
    priority = serializers.CharField(read_only=True)
    start_date = serializers.DateTimeField(read_only=True)
    target_date = serializers.DateTimeField(read_only=True)
    # EE: project_grouping ends

    def get_members(self, obj):
        project_members = getattr(obj, "members_list", None)
@@ -207,22 +212,6 @@ class ProjectMemberLiteSerializer(BaseSerializer):
        read_only_fields = fields


class DeployBoardSerializer(BaseSerializer):
    project_details = ProjectLiteSerializer(read_only=True, source="project")
    workspace_detail = WorkspaceLiteSerializer(
        read_only=True, source="workspace"
    )

    class Meta:
        model = DeployBoard
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "anchor",
        ]


class ProjectPublicMemberSerializer(BaseSerializer):
    class Meta:
        model = ProjectPublicMember
@@ -9,6 +9,7 @@ from plane.utils.issue_filters import issue_filters

class IssueViewSerializer(DynamicBaseSerializer):
    is_favorite = serializers.BooleanField(read_only=True)
    anchor = serializers.CharField(read_only=True)

    class Meta:
        model = IssueView
@@ -18,6 +18,13 @@ from .views import urlpatterns as view_urls
from .webhook import urlpatterns as webhook_urls
from .workspace import urlpatterns as workspace_urls

# Integrations URLS
from .importer import urlpatterns as importer_urls
from .integration import urlpatterns as integration_urls

# url patterns
from plane.ee.urls.app import urlpatterns as ee_urls

urlpatterns = [
    *analytic_urls,
    *asset_urls,
@@ -38,4 +45,8 @@ urlpatterns = [
    *workspace_urls,
    *api_urls,
    *webhook_urls,
    # ee
    *integration_urls,
    *importer_urls,
    *ee_urls,
]
43
apiserver/plane/app/urls/importer.py
Normal file
@@ -0,0 +1,43 @@
from django.urls import path


from plane.app.views import (
    ServiceIssueImportSummaryEndpoint,
    ImportServiceEndpoint,
    UpdateServiceImportStatusEndpoint,
    BulkImportIssuesEndpoint,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/importers/<str:service>/",
        ServiceIssueImportSummaryEndpoint.as_view(),
        name="importer-summary",
    ),
    path(
        "workspaces/<str:slug>/projects/importers/<str:service>/",
        ImportServiceEndpoint.as_view(),
        name="importer",
    ),
    path(
        "workspaces/<str:slug>/importers/",
        ImportServiceEndpoint.as_view(),
        name="importer",
    ),
    path(
        "workspaces/<str:slug>/importers/<str:service>/<uuid:pk>/",
        ImportServiceEndpoint.as_view(),
        name="importer",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/service/<str:service>/importers/<uuid:importer_id>/",
        UpdateServiceImportStatusEndpoint.as_view(),
        name="importer-status",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-issues/<str:service>/",
        BulkImportIssuesEndpoint.as_view(),
        name="bulk-import-issues",
    ),
]
150
apiserver/plane/app/urls/integration.py
Normal file
@@ -0,0 +1,150 @@
from django.urls import path


from plane.app.views import (
    IntegrationViewSet,
    WorkspaceIntegrationViewSet,
    GithubRepositoriesEndpoint,
    GithubRepositorySyncViewSet,
    GithubIssueSyncViewSet,
    GithubCommentSyncViewSet,
    BulkCreateGithubIssueSyncEndpoint,
    SlackProjectSyncViewSet,
)


urlpatterns = [
    path(
        "integrations/",
        IntegrationViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="integrations",
    ),
    path(
        "integrations/<uuid:pk>/",
        IntegrationViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="integrations",
    ),
    path(
        "workspaces/<str:slug>/workspace-integrations/",
        WorkspaceIntegrationViewSet.as_view(
            {
                "get": "list",
            }
        ),
        name="workspace-integrations",
    ),
    path(
        "workspaces/<str:slug>/workspace-integrations/<str:provider>/",
        WorkspaceIntegrationViewSet.as_view(
            {
                "post": "create",
            }
        ),
        name="workspace-integrations",
    ),
    path(
        "workspaces/<str:slug>/workspace-integrations/<uuid:pk>/provider/",
        WorkspaceIntegrationViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
        name="workspace-integrations",
    ),
    # Github Integrations
    path(
        "workspaces/<str:slug>/workspace-integrations/<uuid:workspace_integration_id>/github-repositories/",
        GithubRepositoriesEndpoint.as_view(),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/",
        GithubRepositorySyncViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/<uuid:pk>/",
        GithubRepositorySyncViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/",
        GithubIssueSyncViewSet.as_view(
            {
                "post": "create",
                "get": "list",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/bulk-create-github-issue-sync/",
        BulkCreateGithubIssueSyncEndpoint.as_view(),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:pk>/",
        GithubIssueSyncViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/",
        GithubCommentSyncViewSet.as_view(
            {
                "post": "create",
                "get": "list",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/<uuid:pk>/",
        GithubCommentSyncViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
    ),
    ## End Github Integrations
    # Slack Integration
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/",
        SlackProjectSyncViewSet.as_view(
            {
                "post": "create",
                "get": "list",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/<uuid:pk>/",
        SlackProjectSyncViewSet.as_view(
            {
                "delete": "destroy",
                "get": "retrieve",
            }
        ),
    ),
    ## End Slack Integration
]
@@ -19,7 +19,6 @@ from plane.app.views import (
    IssueUserDisplayPropertyEndpoint,
    IssueViewSet,
    LabelViewSet,
    BulkArchiveIssuesEndpoint,
)

urlpatterns = [
@@ -82,11 +81,6 @@ urlpatterns = [
        BulkDeleteIssuesEndpoint.as_view(),
        name="project-issues-bulk",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-archive-issues/",
        BulkArchiveIssuesEndpoint.as_view(),
        name="bulk-archive-issues",
    ),
    ##
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/sub-issues/",
@@ -44,6 +44,17 @@ urlpatterns = [
        ),
        name="user-favorite-pages",
    ),
    # Lock
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/lock/",
        PageViewSet.as_view(
            {
                "post": "lock",
                "delete": "unlock",
            }
        ),
        name="project-page-lock-unlock",
    ),
    # archived pages
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/archive/",
@@ -4,6 +4,9 @@ from django.urls import path
from plane.app.views import (
    GlobalSearchEndpoint,
    IssueSearchEndpoint,
    SearchEndpoint,
    WorkspaceSearchEndpoint,
    WorkspaceEntitySearchEndpoint,
)


@@ -18,4 +21,19 @@ urlpatterns = [
        IssueSearchEndpoint.as_view(),
        name="project-issue-search",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/search/",
        SearchEndpoint.as_view(),
        name="search",
    ),
    path(
        "workspaces/<str:slug>/app-search/",
        WorkspaceSearchEndpoint.as_view(),
        name="app-search",
    ),
    path(
        "workspaces/<str:slug>/entity-search/",
        WorkspaceEntitySearchEndpoint.as_view(),
        name="entity-search",
    ),
]
@@ -118,7 +118,7 @@ from .issue.activity import (
    IssueActivityEndpoint,
)

from .issue.archive import IssueArchiveViewSet, BulkArchiveIssuesEndpoint
from .issue.archive import IssueArchiveViewSet

from .issue.attachment import (
    IssueAttachmentEndpoint,
@@ -185,9 +185,17 @@ from .page.base import (
)
from .page.version import PageVersionEndpoint

from .search.base import (
    GlobalSearchEndpoint,
    SearchEndpoint,
)

from .search.base import GlobalSearchEndpoint
from .search.issue import IssueSearchEndpoint

from .search.workspace import (
    WorkspaceSearchEndpoint,
    WorkspaceEntitySearchEndpoint,
)

from .external.base import (
    GPTIntegrationEndpoint,
@@ -229,6 +237,28 @@ from .dashboard.base import DashboardEndpoint, WidgetsEndpoint

from .error_404 import custom_404_view

from .importer.base import (
    ServiceIssueImportSummaryEndpoint,
    ImportServiceEndpoint,
    UpdateServiceImportStatusEndpoint,
    BulkImportIssuesEndpoint,
    BulkImportModulesEndpoint,
)

from .integration.base import (
    IntegrationViewSet,
    WorkspaceIntegrationViewSet,
)

from .integration.github import (
    GithubRepositoriesEndpoint,
    GithubRepositorySyncViewSet,
    GithubIssueSyncViewSet,
    GithubCommentSyncViewSet,
    BulkCreateGithubIssueSyncEndpoint,
)

from .integration.slack import SlackProjectSyncViewSet
from .exporter.base import ExportIssuesEndpoint
from .notification.base import MarkAllReadNotificationViewSet
from .user.base import AccountEndpoint, ProfileEndpoint, UserSessionEndpoint
@@ -28,6 +28,7 @@ from django.core.serializers.json import DjangoJSONEncoder
|
||||
# Third party imports
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
|
||||
from plane.app.permissions import (
|
||||
allow_permission, ROLE
|
||||
)
|
||||
|
||||
@@ -17,7 +17,6 @@ from .. import BaseViewSet
|
||||
from plane.app.serializers import (
|
||||
CycleIssueSerializer,
|
||||
)
|
||||
from plane.bgtasks.issue_activities_task import issue_activity
|
||||
from plane.db.models import (
|
||||
Cycle,
|
||||
CycleIssue,
|
||||
@@ -30,6 +29,7 @@ from plane.utils.grouper import (
|
||||
issue_on_results,
|
||||
issue_queryset_grouper,
|
||||
)
|
||||
from plane.bgtasks.issue_activities_task import issue_activity
|
||||
from plane.utils.issue_filters import issue_filters
|
||||
from plane.utils.order_queryset import order_issue_queryset
|
||||
from plane.utils.paginator import (
|
||||
|
||||
560
apiserver/plane/app/views/importer/base.py
Normal file
@@ -0,0 +1,560 @@
|
||||
# Python imports
|
||||
import uuid
|
||||
|
||||
# Third party imports
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
|
||||
# Django imports
|
||||
from django.db.models import Max, Q
|
||||
|
||||
# Module imports
|
||||
from plane.app.views import BaseAPIView
|
||||
from plane.db.models import (
|
||||
WorkspaceIntegration,
|
||||
Importer,
|
||||
APIToken,
|
||||
Project,
|
||||
State,
|
||||
IssueSequence,
|
||||
Issue,
|
||||
IssueActivity,
|
||||
IssueComment,
|
||||
IssueLink,
|
||||
IssueLabel,
|
||||
Workspace,
|
||||
IssueAssignee,
|
||||
Module,
|
||||
ModuleLink,
|
||||
ModuleIssue,
|
||||
Label,
|
||||
)
|
||||
from plane.app.serializers import (
|
||||
ImporterSerializer,
|
||||
IssueFlatSerializer,
|
||||
ModuleSerializer,
|
||||
)
|
||||
from plane.utils.integrations.github import get_github_repo_details
|
||||
from plane.utils.importers.jira import (
|
||||
jira_project_issue_summary,
|
||||
is_allowed_hostname,
|
||||
)
|
||||
from plane.bgtasks.importer_task import service_importer
|
||||
from plane.utils.html_processor import strip_tags
|
||||
from plane.app.permissions import WorkSpaceAdminPermission
|
||||
|
||||
|
||||
class ServiceIssueImportSummaryEndpoint(BaseAPIView):
|
||||
def get(self, request, slug, service):
|
||||
if service == "github":
|
||||
owner = request.GET.get("owner", False)
|
||||
repo = request.GET.get("repo", False)
|
||||
|
||||
if not owner or not repo:
|
||||
return Response(
|
||||
{"error": "Owner and repo are required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
workspace_integration = WorkspaceIntegration.objects.get(
|
||||
integration__provider="github", workspace__slug=slug
|
||||
)
|
||||
|
||||
access_tokens_url = workspace_integration.metadata.get(
|
||||
"access_tokens_url", False
|
||||
)
|
||||
|
||||
if not access_tokens_url:
|
||||
return Response(
|
||||
{
|
||||
"error": "There was an error during the installation of the GitHub app. To resolve this issue, we recommend reinstalling the GitHub app."
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
issue_count, labels, collaborators = get_github_repo_details(
|
||||
access_tokens_url, owner, repo
|
||||
)
|
||||
return Response(
|
||||
{
|
||||
"issue_count": issue_count,
|
||||
"labels": labels,
|
||||
"collaborators": collaborators,
|
||||
},
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
if service == "jira":
|
||||
# Check for all the keys
|
||||
params = {
|
||||
"project_key": "Project key is required",
|
||||
"api_token": "API token is required",
|
||||
"email": "Email is required",
|
||||
"cloud_hostname": "Cloud hostname is required",
|
||||
}
|
||||
|
||||
for key, error_message in params.items():
|
||||
if not request.GET.get(key, False):
|
||||
return Response(
|
||||
{"error": error_message},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
project_key = request.GET.get("project_key", "")
|
||||
api_token = request.GET.get("api_token", "")
|
||||
email = request.GET.get("email", "")
|
||||
cloud_hostname = request.GET.get("cloud_hostname", "")
|
||||
|
||||
response = jira_project_issue_summary(
|
||||
email, api_token, project_key, cloud_hostname
|
||||
)
|
||||
if "error" in response:
|
||||
return Response(response, status=status.HTTP_400_BAD_REQUEST)
|
||||
else:
|
||||
return Response(
|
||||
response,
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
return Response(
|
||||
{"error": "Service not supported yet"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
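
For orientation, a hypothetical client call against the summary endpoint above; the host, API prefix, and exact route are assumptions, since the matching urlpatterns are not part of this hunk:

# Sketch only; the token and URL shape are placeholders, not taken from this diff.
import requests

resp = requests.get(
    "https://plane.example.com/api/workspaces/my-team/importers/github/",
    params={"owner": "makeplane", "repo": "plane"},
    headers={"Authorization": "Bearer <api-token>"},
)
print(resp.status_code, resp.json())  # expects issue_count, labels, collaborators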
|
||||
|
||||
|
||||
class ImportServiceEndpoint(BaseAPIView):
|
||||
permission_classes = [
|
||||
WorkSpaceAdminPermission,
|
||||
]
|
||||
|
||||
def post(self, request, slug, service):
|
||||
project_id = request.data.get("project_id", False)
|
||||
|
||||
if not project_id:
|
||||
return Response(
|
||||
{"error": "Project ID is required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
workspace = Workspace.objects.get(slug=slug)
|
||||
|
||||
if service == "github":
|
||||
data = request.data.get("data", False)
|
||||
metadata = request.data.get("metadata", False)
|
||||
config = request.data.get("config", False)
|
||||
if not data or not metadata or not config:
|
||||
return Response(
|
||||
{"error": "Data, config and metadata are required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
api_token = APIToken.objects.filter(
|
||||
user=request.user, workspace=workspace
|
||||
).first()
|
||||
if api_token is None:
|
||||
api_token = APIToken.objects.create(
|
||||
user=request.user,
|
||||
label="Importer",
|
||||
workspace=workspace,
|
||||
)
|
||||
|
||||
importer = Importer.objects.create(
|
||||
service=service,
|
||||
project_id=project_id,
|
||||
status="queued",
|
||||
initiated_by=request.user,
|
||||
data=data,
|
||||
metadata=metadata,
|
||||
token=api_token,
|
||||
config=config,
|
||||
created_by=request.user,
|
||||
updated_by=request.user,
|
||||
)
|
||||
|
||||
service_importer.delay(service, importer.id)
|
||||
serializer = ImporterSerializer(importer)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
|
||||
if service == "jira":
|
||||
data = request.data.get("data", False)
|
||||
metadata = request.data.get("metadata", False)
|
||||
config = request.data.get("config", False)
|
||||
|
||||
cloud_hostname = metadata.get("cloud_hostname", False)
|
||||
|
||||
if not cloud_hostname:
|
||||
return Response(
|
||||
{"error": "Cloud hostname is required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
if not is_allowed_hostname(cloud_hostname):
|
||||
return Response(
|
||||
{"error": "Hostname is not a valid hostname."},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
if not data or not metadata:
|
||||
return Response(
|
||||
{"error": "Data, config and metadata are required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
api_token = APIToken.objects.filter(
|
||||
user=request.user, workspace=workspace
|
||||
).first()
|
||||
if api_token is None:
|
||||
api_token = APIToken.objects.create(
|
||||
user=request.user,
|
||||
label="Importer",
|
||||
workspace=workspace,
|
||||
)
|
||||
|
||||
importer = Importer.objects.create(
|
||||
service=service,
|
||||
project_id=project_id,
|
||||
status="queued",
|
||||
initiated_by=request.user,
|
||||
data=data,
|
||||
metadata=metadata,
|
||||
token=api_token,
|
||||
config=config,
|
||||
created_by=request.user,
|
||||
updated_by=request.user,
|
||||
)
|
||||
|
||||
service_importer.delay(service, importer.id)
|
||||
serializer = ImporterSerializer(importer)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
|
||||
return Response(
|
||||
{"error": "Servivce not supported yet"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
def get(self, request, slug):
|
||||
imports = (
|
||||
Importer.objects.filter(workspace__slug=slug)
|
||||
.order_by("-created_at")
|
||||
.select_related("initiated_by", "project", "workspace")
|
||||
)
|
||||
serializer = ImporterSerializer(imports, many=True)
|
||||
return Response(serializer.data)
|
||||
|
||||
def delete(self, request, slug, service, pk):
|
||||
importer = Importer.objects.get(
|
||||
pk=pk, service=service, workspace__slug=slug
|
||||
)
|
||||
|
||||
if importer.imported_data is not None:
|
||||
# Delete all imported Issues
|
||||
imported_issues = importer.imported_data.get("issues", [])
|
||||
Issue.issue_objects.filter(id__in=imported_issues).delete()
|
||||
|
||||
# Delete all imported Labels
|
||||
imported_labels = importer.imported_data.get("labels", [])
|
||||
Label.objects.filter(id__in=imported_labels).delete()
|
||||
|
||||
if importer.service == "jira":
|
||||
imported_modules = importer.imported_data.get("modules", [])
|
||||
Module.objects.filter(id__in=imported_modules).delete()
|
||||
importer.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
def patch(self, request, slug, service, pk):
|
||||
importer = Importer.objects.get(
|
||||
pk=pk, service=service, workspace__slug=slug
|
||||
)
|
||||
serializer = ImporterSerializer(
|
||||
importer, data=request.data, partial=True
|
||||
)
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
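
A minimal sketch of the body the post handler above expects for a Jira import; every value is illustrative, and only the keys the view actually reads are shown:

# Hypothetical request body for POST .../importers/jira/ (route assumed).
payload = {
    "project_id": "9f8c2c1e-0000-0000-0000-000000000000",    # required
    "data": {},                                              # service-specific dump
    "metadata": {"cloud_hostname": "example.atlassian.net"},  # validated above
    "config": {},                                            # forwarded to the import task
}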
|
||||
|
||||
|
||||
class UpdateServiceImportStatusEndpoint(BaseAPIView):
|
||||
def post(self, request, slug, project_id, service, importer_id):
|
||||
importer = Importer.objects.get(
|
||||
pk=importer_id,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
service=service,
|
||||
)
|
||||
importer.status = request.data.get("status", "processing")
|
||||
importer.save()
|
||||
return Response(status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
class BulkImportIssuesEndpoint(BaseAPIView):
|
||||
def post(self, request, slug, project_id, service):
|
||||
# Get the project
|
||||
project = Project.objects.get(pk=project_id, workspace__slug=slug)
|
||||
|
||||
# Get the default state
|
||||
default_state = State.objects.filter(
|
||||
~Q(name="Triage"), project_id=project_id, default=True
|
||||
).first()
|
||||
# if there is no default state assign any random state
|
||||
if default_state is None:
|
||||
default_state = State.objects.filter(
|
||||
~Q(name="Triage"), project_id=project_id
|
||||
).first()
|
||||
|
||||
# Get the maximum sequence_id
|
||||
last_id = IssueSequence.objects.filter(
|
||||
project_id=project_id
|
||||
).aggregate(largest=Max("sequence"))["largest"]
|
||||
|
||||
last_id = 1 if last_id is None else last_id + 1
|
||||
|
||||
# Get the maximum sort order
|
||||
largest_sort_order = Issue.objects.filter(
|
||||
project_id=project_id, state=default_state
|
||||
).aggregate(largest=Max("sort_order"))["largest"]
|
||||
|
||||
largest_sort_order = (
|
||||
65535 if largest_sort_order is None else largest_sort_order + 10000
|
||||
)
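# Worked example (illustrative): with no issues in the project yet,
# largest_sort_order starts at 65535; the loop below bumps it by 10000 per
# imported issue, so a batch of three lands at 65535, 75535, 85535, while
# last_id starts at 1 and increments by 1 per issue.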
|
||||
|
||||
# Get the issues_data
|
||||
issues_data = request.data.get("issues_data", [])
|
||||
|
||||
if not len(issues_data):
|
||||
return Response(
|
||||
{"error": "Issue data is required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Issues
|
||||
bulk_issues = []
|
||||
for issue_data in issues_data:
|
||||
bulk_issues.append(
|
||||
Issue(
|
||||
project_id=project_id,
|
||||
workspace_id=project.workspace_id,
|
||||
state_id=(
|
||||
issue_data.get("state")
|
||||
if issue_data.get("state", False)
|
||||
else default_state.id
|
||||
),
|
||||
name=issue_data.get("name", "Issue Created through Bulk"),
|
||||
description_html=issue_data.get(
|
||||
"description_html", "<p></p>"
|
||||
),
|
||||
description_stripped=(
|
||||
None
|
||||
if (
|
||||
issue_data.get("description_html") == ""
|
||||
or issue_data.get("description_html") is None
|
||||
)
|
||||
else strip_tags(issue_data.get("description_html"))
|
||||
),
|
||||
sequence_id=last_id,
|
||||
sort_order=largest_sort_order,
|
||||
start_date=issue_data.get("start_date", None),
|
||||
target_date=issue_data.get("target_date", None),
|
||||
priority=issue_data.get("priority", "none"),
|
||||
created_by=request.user,
|
||||
)
|
||||
)
|
||||
|
||||
largest_sort_order = largest_sort_order + 10000
|
||||
last_id = last_id + 1
|
||||
|
||||
issues = Issue.objects.bulk_create(
|
||||
bulk_issues,
|
||||
batch_size=100,
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
|
||||
# Sequences
|
||||
_ = IssueSequence.objects.bulk_create(
|
||||
[
|
||||
IssueSequence(
|
||||
issue=issue,
|
||||
sequence=issue.sequence_id,
|
||||
project_id=project_id,
|
||||
workspace_id=project.workspace_id,
|
||||
)
|
||||
for issue in issues
|
||||
],
|
||||
batch_size=100,
|
||||
)
|
||||
|
||||
# Attach Labels
|
||||
bulk_issue_labels = []
|
||||
for issue, issue_data in zip(issues, issues_data):
|
||||
labels_list = issue_data.get("labels_list", [])
|
||||
bulk_issue_labels = bulk_issue_labels + [
|
||||
IssueLabel(
|
||||
issue=issue,
|
||||
label_id=label_id,
|
||||
project_id=project_id,
|
||||
workspace_id=project.workspace_id,
|
||||
created_by=request.user,
|
||||
)
|
||||
for label_id in labels_list
|
||||
]
|
||||
|
||||
_ = IssueLabel.objects.bulk_create(
|
||||
bulk_issue_labels, batch_size=100, ignore_conflicts=True
|
||||
)
|
||||
|
||||
# Attach Assignees
|
||||
bulk_issue_assignees = []
|
||||
for issue, issue_data in zip(issues, issues_data):
|
||||
assignees_list = issue_data.get("assignees_list", [])
|
||||
bulk_issue_assignees = bulk_issue_assignees + [
|
||||
IssueAssignee(
|
||||
issue=issue,
|
||||
assignee_id=assignee_id,
|
||||
project_id=project_id,
|
||||
workspace_id=project.workspace_id,
|
||||
created_by=request.user,
|
||||
)
|
||||
for assignee_id in assignees_list
|
||||
]
|
||||
|
||||
_ = IssueAssignee.objects.bulk_create(
|
||||
bulk_issue_assignees, batch_size=100, ignore_conflicts=True
|
||||
)
|
||||
|
||||
# Track the issue activities
|
||||
IssueActivity.objects.bulk_create(
|
||||
[
|
||||
IssueActivity(
|
||||
issue=issue,
|
||||
actor=request.user,
|
||||
project_id=project_id,
|
||||
workspace_id=project.workspace_id,
|
||||
comment=f"imported the issue from {service}",
|
||||
verb="created",
|
||||
created_by=request.user,
|
||||
)
|
||||
for issue in issues
|
||||
],
|
||||
batch_size=100,
|
||||
)
|
||||
|
||||
# Create Comments
|
||||
bulk_issue_comments = []
|
||||
for issue, issue_data in zip(issues, issues_data):
|
||||
comments_list = issue_data.get("comments_list", [])
|
||||
bulk_issue_comments = bulk_issue_comments + [
|
||||
IssueComment(
|
||||
issue=issue,
|
||||
comment_html=comment.get("comment_html", "<p></p>"),
|
||||
actor=request.user,
|
||||
project_id=project_id,
|
||||
workspace_id=project.workspace_id,
|
||||
created_by=request.user,
|
||||
)
|
||||
for comment in comments_list
|
||||
]
|
||||
|
||||
_ = IssueComment.objects.bulk_create(
|
||||
bulk_issue_comments, batch_size=100
|
||||
)
|
||||
|
||||
# Attach Links
|
||||
_ = IssueLink.objects.bulk_create(
|
||||
[
|
||||
IssueLink(
|
||||
issue=issue,
|
||||
url=issue_data.get("link", {}).get(
|
||||
"url", "https://github.com"
|
||||
),
|
||||
title=issue_data.get("link", {}).get(
|
||||
"title", "Original Issue"
|
||||
),
|
||||
project_id=project_id,
|
||||
workspace_id=project.workspace_id,
|
||||
created_by=request.user,
|
||||
)
|
||||
for issue, issue_data in zip(issues, issues_data)
|
||||
]
|
||||
)
|
||||
|
||||
return Response(
|
||||
{"issues": IssueFlatSerializer(issues, many=True).data},
|
||||
status=status.HTTP_201_CREATED,
|
||||
)
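
An illustrative issues_data entry for the bulk endpoint above; the keys mirror what the handler reads, and all ids and values are placeholders:

issues_data = [
    {
        "name": "Imported issue",
        "description_html": "<p>Synced from the source tracker</p>",
        "priority": "medium",
        "labels_list": [],     # previously created label ids
        "assignees_list": [],  # workspace member ids
        "comments_list": [{"comment_html": "<p>First comment</p>"}],
        "link": {
            "url": "https://github.com/makeplane/plane/issues/1",
            "title": "Original Issue",
        },
    }
]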
|
||||
|
||||
|
||||
class BulkImportModulesEndpoint(BaseAPIView):
|
||||
def post(self, request, slug, project_id, service):
|
||||
modules_data = request.data.get("modules_data", [])
|
||||
project = Project.objects.get(pk=project_id, workspace__slug=slug)
|
||||
|
||||
modules = Module.objects.bulk_create(
|
||||
[
|
||||
Module(
|
||||
name=module.get("name", uuid.uuid4().hex),
|
||||
description=module.get("description", ""),
|
||||
start_date=module.get("start_date", None),
|
||||
target_date=module.get("target_date", None),
|
||||
project_id=project_id,
|
||||
workspace_id=project.workspace_id,
|
||||
created_by=request.user,
|
||||
)
|
||||
for module in modules_data
|
||||
],
|
||||
batch_size=100,
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
|
||||
modules = Module.objects.filter(
|
||||
id__in=[module.id for module in modules]
|
||||
)
|
||||
|
||||
if len(modules) == len(modules_data):
|
||||
_ = ModuleLink.objects.bulk_create(
|
||||
[
|
||||
ModuleLink(
|
||||
module=module,
|
||||
url=module_data.get("link", {}).get(
|
||||
"url", "https://plane.so"
|
||||
),
|
||||
title=module_data.get("link", {}).get(
|
||||
"title", "Original Issue"
|
||||
),
|
||||
project_id=project_id,
|
||||
workspace_id=project.workspace_id,
|
||||
created_by=request.user,
|
||||
)
|
||||
for module, module_data in zip(modules, modules_data)
|
||||
],
|
||||
batch_size=100,
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
|
||||
bulk_module_issues = []
|
||||
for module, module_data in zip(modules, modules_data):
|
||||
module_issues_list = module_data.get("module_issues_list", [])
|
||||
bulk_module_issues = bulk_module_issues + [
|
||||
ModuleIssue(
|
||||
issue_id=issue,
|
||||
module=module,
|
||||
project_id=project_id,
|
||||
workspace_id=project.workspace_id,
|
||||
created_by=request.user,
|
||||
)
|
||||
for issue in module_issues_list
|
||||
]
|
||||
|
||||
_ = ModuleIssue.objects.bulk_create(
|
||||
bulk_module_issues, batch_size=100, ignore_conflicts=True
|
||||
)
|
||||
|
||||
serializer = ModuleSerializer(modules, many=True)
|
||||
return Response(
|
||||
{"modules": serializer.data}, status=status.HTTP_201_CREATED
|
||||
)
|
||||
|
||||
else:
|
||||
return Response(
|
||||
{
|
||||
"message": "Modules created but issues could not be imported"
|
||||
},
|
||||
status=status.HTTP_200_OK,
|
||||
)
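
And the matching modules_data shape for BulkImportModulesEndpoint; module_issues_list must contain ids of issues imported earlier, all values illustrative:

modules_data = [
    {
        "name": "Sprint 1",
        "description": "Imported module",
        "start_date": None,
        "target_date": None,
        "link": {"url": "https://example.atlassian.net/browse/SPR-1", "title": "Original Module"},
        "module_issues_list": [],  # Plane issue ids created by the bulk issue import
    }
]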
|
||||
9
apiserver/plane/app/views/integration/__init__.py
Normal file
@@ -0,0 +1,9 @@
|
||||
from .base import IntegrationViewSet, WorkspaceIntegrationViewSet
|
||||
from .github import (
|
||||
GithubRepositorySyncViewSet,
|
||||
GithubIssueSyncViewSet,
|
||||
BulkCreateGithubIssueSyncEndpoint,
|
||||
GithubCommentSyncViewSet,
|
||||
GithubRepositoriesEndpoint,
|
||||
)
|
||||
from .slack import SlackProjectSyncViewSet
|
||||
181
apiserver/plane/app/views/integration/base.py
Normal file
@@ -0,0 +1,181 @@
|
||||
# Python imports
|
||||
import uuid
|
||||
|
||||
# Django imports
|
||||
from django.contrib.auth.hashers import make_password
|
||||
|
||||
# Third party imports
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
|
||||
# Module imports
|
||||
from plane.app.views import BaseViewSet
|
||||
from plane.db.models import (
|
||||
Integration,
|
||||
WorkspaceIntegration,
|
||||
Workspace,
|
||||
User,
|
||||
WorkspaceMember,
|
||||
APIToken,
|
||||
)
|
||||
from plane.app.serializers import (
|
||||
IntegrationSerializer,
|
||||
WorkspaceIntegrationSerializer,
|
||||
)
|
||||
from plane.utils.integrations.github import (
|
||||
get_github_metadata,
|
||||
delete_github_installation,
|
||||
)
|
||||
from plane.app.permissions import WorkSpaceAdminPermission
|
||||
from plane.utils.integrations.slack import slack_oauth
|
||||
|
||||
|
||||
class IntegrationViewSet(BaseViewSet):
|
||||
serializer_class = IntegrationSerializer
|
||||
model = Integration
|
||||
|
||||
def create(self, request):
|
||||
serializer = IntegrationSerializer(data=request.data)
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def partial_update(self, request, pk):
|
||||
integration = Integration.objects.get(pk=pk)
|
||||
if integration.verified:
|
||||
return Response(
|
||||
{"error": "Verified integrations cannot be updated"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
serializer = IntegrationSerializer(
|
||||
integration, data=request.data, partial=True
|
||||
)
|
||||
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def destroy(self, request, pk):
|
||||
integration = Integration.objects.get(pk=pk)
|
||||
if integration.verified:
|
||||
return Response(
|
||||
{"error": "Verified integrations cannot be updated"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
integration.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
class WorkspaceIntegrationViewSet(BaseViewSet):
|
||||
serializer_class = WorkspaceIntegrationSerializer
|
||||
model = WorkspaceIntegration
|
||||
|
||||
permission_classes = [
|
||||
WorkSpaceAdminPermission,
|
||||
]
|
||||
|
||||
def get_queryset(self):
|
||||
return (
|
||||
super()
|
||||
.get_queryset()
|
||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
||||
.select_related("integration")
|
||||
)
|
||||
|
||||
def create(self, request, slug, provider):
|
||||
workspace = Workspace.objects.get(slug=slug)
|
||||
integration = Integration.objects.get(provider=provider)
|
||||
config = {}
|
||||
if provider == "github":
|
||||
installation_id = request.data.get("installation_id", None)
|
||||
if not installation_id:
|
||||
return Response(
|
||||
{"error": "Installation ID is required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
metadata = get_github_metadata(installation_id)
|
||||
config = {"installation_id": installation_id}
|
||||
|
||||
if provider == "slack":
|
||||
code = request.data.get("code", False)
|
||||
|
||||
if not code:
|
||||
return Response(
|
||||
{"error": "Code is required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
slack_response = slack_oauth(code=code)
|
||||
|
||||
metadata = slack_response
|
||||
access_token = metadata.get("access_token", False)
|
||||
team_id = metadata.get("team", {}).get("id", False)
|
||||
if not metadata or not access_token or not team_id:
|
||||
return Response(
|
||||
{
|
||||
"error": "Slack could not be installed. Please try again later"
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
config = {"team_id": team_id, "access_token": access_token}
|
||||
|
||||
# Create a bot user
|
||||
bot_user = User.objects.create(
|
||||
email=f"{uuid.uuid4().hex}@plane.so",
|
||||
username=uuid.uuid4().hex,
|
||||
password=make_password(uuid.uuid4().hex),
|
||||
is_password_autoset=True,
|
||||
is_bot=True,
|
||||
first_name=integration.title,
|
||||
avatar=(
|
||||
integration.avatar_url
|
||||
if integration.avatar_url is not None
|
||||
else ""
|
||||
),
|
||||
)
|
||||
|
||||
# Create an API Token for the bot user
|
||||
api_token = APIToken.objects.create(
|
||||
user=bot_user,
|
||||
user_type=1, # bot user
|
||||
workspace=workspace,
|
||||
)
|
||||
|
||||
workspace_integration = WorkspaceIntegration.objects.create(
|
||||
workspace=workspace,
|
||||
integration=integration,
|
||||
actor=bot_user,
|
||||
api_token=api_token,
|
||||
metadata=metadata,
|
||||
config=config,
|
||||
)
|
||||
|
||||
# Add bot user as a member of workspace
|
||||
_ = WorkspaceMember.objects.create(
|
||||
workspace=workspace_integration.workspace,
|
||||
member=bot_user,
|
||||
role=20,
|
||||
)
|
||||
return Response(
|
||||
WorkspaceIntegrationSerializer(workspace_integration).data,
|
||||
status=status.HTTP_201_CREATED,
|
||||
)
|
||||
|
||||
def destroy(self, request, slug, pk):
|
||||
workspace_integration = WorkspaceIntegration.objects.get(
|
||||
pk=pk, workspace__slug=slug
|
||||
)
|
||||
|
||||
if workspace_integration.integration.provider == "github":
|
||||
installation_id = workspace_integration.config.get(
|
||||
"installation_id", False
|
||||
)
|
||||
if installation_id:
|
||||
delete_github_installation(installation_id=installation_id)
|
||||
|
||||
workspace_integration.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
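
For reference, these are the only keys create() above reads from the request body, per provider; the route itself is not part of this hunk and is assumed:

github_body = {"installation_id": "12345678"}  # GitHub App installation id
slack_body = {"code": "<oauth-code>"}          # one-time Slack OAuth code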
|
||||
201
apiserver/plane/app/views/integration/github.py
Normal file
@@ -0,0 +1,201 @@
|
||||
# Third party imports
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
|
||||
# Module imports
|
||||
from plane.app.views import BaseViewSet, BaseAPIView
|
||||
from plane.db.models import (
|
||||
GithubIssueSync,
|
||||
GithubRepositorySync,
|
||||
GithubRepository,
|
||||
WorkspaceIntegration,
|
||||
ProjectMember,
|
||||
Label,
|
||||
GithubCommentSync,
|
||||
Project,
|
||||
)
|
||||
from plane.app.serializers import (
|
||||
GithubIssueSyncSerializer,
|
||||
GithubRepositorySyncSerializer,
|
||||
GithubCommentSyncSerializer,
|
||||
)
|
||||
from plane.utils.integrations.github import get_github_repos
|
||||
from plane.app.permissions import (
|
||||
ProjectBasePermission,
|
||||
ProjectEntityPermission,
|
||||
)
|
||||
|
||||
|
||||
class GithubRepositoriesEndpoint(BaseAPIView):
|
||||
permission_classes = [
|
||||
ProjectBasePermission,
|
||||
]
|
||||
|
||||
def get(self, request, slug, workspace_integration_id):
|
||||
page = request.GET.get("page", 1)
|
||||
workspace_integration = WorkspaceIntegration.objects.get(
|
||||
workspace__slug=slug, pk=workspace_integration_id
|
||||
)
|
||||
|
||||
if workspace_integration.integration.provider != "github":
|
||||
return Response(
|
||||
{"error": "Not a github integration"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
access_tokens_url = workspace_integration.metadata["access_tokens_url"]
|
||||
repositories_url = (
|
||||
workspace_integration.metadata["repositories_url"]
|
||||
+ f"?per_page=100&page={page}"
|
||||
)
|
||||
repositories = get_github_repos(access_tokens_url, repositories_url)
|
||||
return Response(repositories, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
class GithubRepositorySyncViewSet(BaseViewSet):
|
||||
permission_classes = [
|
||||
ProjectBasePermission,
|
||||
]
|
||||
|
||||
serializer_class = GithubRepositorySyncSerializer
|
||||
model = GithubRepositorySync
|
||||
|
||||
def perform_create(self, serializer):
|
||||
serializer.save(project_id=self.kwargs.get("project_id"))
|
||||
|
||||
def get_queryset(self):
|
||||
return (
|
||||
super()
|
||||
.get_queryset()
|
||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
||||
.filter(project_id=self.kwargs.get("project_id"))
|
||||
)
|
||||
|
||||
def create(self, request, slug, project_id, workspace_integration_id):
|
||||
name = request.data.get("name", False)
|
||||
url = request.data.get("url", False)
|
||||
config = request.data.get("config", {})
|
||||
repository_id = request.data.get("repository_id", False)
|
||||
owner = request.data.get("owner", False)
|
||||
|
||||
if not name or not url or not repository_id or not owner:
|
||||
return Response(
|
||||
{"error": "Name, url, repository_id and owner are required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Get the workspace integration
|
||||
workspace_integration = WorkspaceIntegration.objects.get(
|
||||
pk=workspace_integration_id
|
||||
)
|
||||
|
||||
# Delete the old repository object
|
||||
GithubRepositorySync.objects.filter(
|
||||
project_id=project_id, workspace__slug=slug
|
||||
).delete()
|
||||
GithubRepository.objects.filter(
|
||||
project_id=project_id, workspace__slug=slug
|
||||
).delete()
|
||||
|
||||
# Create repository
|
||||
repo = GithubRepository.objects.create(
|
||||
name=name,
|
||||
url=url,
|
||||
config=config,
|
||||
repository_id=repository_id,
|
||||
owner=owner,
|
||||
project_id=project_id,
|
||||
)
|
||||
|
||||
# Create a Label for github
|
||||
label = Label.objects.filter(
|
||||
name="GitHub",
|
||||
project_id=project_id,
|
||||
).first()
|
||||
|
||||
if label is None:
|
||||
label = Label.objects.create(
|
||||
name="GitHub",
|
||||
project_id=project_id,
|
||||
description="Label to sync Plane issues with GitHub issues",
|
||||
color="#003773",
|
||||
)
|
||||
|
||||
# Create repo sync
|
||||
repo_sync = GithubRepositorySync.objects.create(
|
||||
repository=repo,
|
||||
workspace_integration=workspace_integration,
|
||||
actor=workspace_integration.actor,
|
||||
credentials=request.data.get("credentials", {}),
|
||||
project_id=project_id,
|
||||
label=label,
|
||||
)
|
||||
|
||||
# Add bot as a member in the project
|
||||
_ = ProjectMember.objects.get_or_create(
|
||||
member=workspace_integration.actor, role=20, project_id=project_id
|
||||
)
|
||||
|
||||
# Return Response
|
||||
return Response(
|
||||
GithubRepositorySyncSerializer(repo_sync).data,
|
||||
status=status.HTTP_201_CREATED,
|
||||
)
|
||||
|
||||
|
||||
class GithubIssueSyncViewSet(BaseViewSet):
|
||||
permission_classes = [
|
||||
ProjectEntityPermission,
|
||||
]
|
||||
|
||||
serializer_class = GithubIssueSyncSerializer
|
||||
model = GithubIssueSync
|
||||
|
||||
def perform_create(self, serializer):
|
||||
serializer.save(
|
||||
project_id=self.kwargs.get("project_id"),
|
||||
repository_sync_id=self.kwargs.get("repo_sync_id"),
|
||||
)
|
||||
|
||||
|
||||
class BulkCreateGithubIssueSyncEndpoint(BaseAPIView):
|
||||
def post(self, request, slug, project_id, repo_sync_id):
|
||||
project = Project.objects.get(pk=project_id, workspace__slug=slug)
|
||||
|
||||
github_issue_syncs = request.data.get("github_issue_syncs", [])
|
||||
github_issue_syncs = GithubIssueSync.objects.bulk_create(
|
||||
[
|
||||
GithubIssueSync(
|
||||
issue_id=github_issue_sync.get("issue"),
|
||||
repo_issue_id=github_issue_sync.get("repo_issue_id"),
|
||||
issue_url=github_issue_sync.get("issue_url"),
|
||||
github_issue_id=github_issue_sync.get("github_issue_id"),
|
||||
repository_sync_id=repo_sync_id,
|
||||
project_id=project_id,
|
||||
workspace_id=project.workspace_id,
|
||||
created_by=request.user,
|
||||
updated_by=request.user,
|
||||
)
|
||||
for github_issue_sync in github_issue_syncs
|
||||
],
|
||||
batch_size=100,
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
|
||||
serializer = GithubIssueSyncSerializer(github_issue_syncs, many=True)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
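
An illustrative github_issue_syncs payload for the bulk sync endpoint above; all ids are placeholders:

github_issue_syncs = [
    {
        "issue": "9f8c2c1e-0000-0000-0000-000000000000",  # Plane issue id
        "repo_issue_id": 42,                              # issue number within the repo
        "issue_url": "https://github.com/makeplane/plane/issues/42",
        "github_issue_id": 987654321,                     # GitHub's global issue id
    }
]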
|
||||
|
||||
|
||||
class GithubCommentSyncViewSet(BaseViewSet):
|
||||
permission_classes = [
|
||||
ProjectEntityPermission,
|
||||
]
|
||||
|
||||
serializer_class = GithubCommentSyncSerializer
|
||||
model = GithubCommentSync
|
||||
|
||||
def perform_create(self, serializer):
|
||||
serializer.save(
|
||||
project_id=self.kwargs.get("project_id"),
|
||||
issue_sync_id=self.kwargs.get("issue_sync_id"),
|
||||
)
|
||||
95
apiserver/plane/app/views/integration/slack.py
Normal file
@@ -0,0 +1,95 @@
|
||||
# Django import
|
||||
from django.db import IntegrityError
|
||||
|
||||
# Third party imports
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
from sentry_sdk import capture_exception
|
||||
|
||||
# Module imports
|
||||
from plane.app.views import BaseViewSet
|
||||
from plane.db.models import (
|
||||
SlackProjectSync,
|
||||
WorkspaceIntegration,
|
||||
ProjectMember,
|
||||
)
|
||||
from plane.app.serializers import SlackProjectSyncSerializer
|
||||
from plane.app.permissions import (
|
||||
ProjectBasePermission,
|
||||
)
|
||||
from plane.utils.integrations.slack import slack_oauth
|
||||
|
||||
|
||||
class SlackProjectSyncViewSet(BaseViewSet):
|
||||
permission_classes = [
|
||||
ProjectBasePermission,
|
||||
]
|
||||
serializer_class = SlackProjectSyncSerializer
|
||||
model = SlackProjectSync
|
||||
|
||||
def get_queryset(self):
|
||||
return (
|
||||
super()
|
||||
.get_queryset()
|
||||
.filter(
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
project_id=self.kwargs.get("project_id"),
|
||||
)
|
||||
.filter(
|
||||
project__project_projectmember__member=self.request.user,
|
||||
project__project_projectmember__is_active=True,
|
||||
)
|
||||
)
|
||||
|
||||
def create(self, request, slug, project_id, workspace_integration_id):
|
||||
try:
|
||||
code = request.data.get("code", False)
|
||||
|
||||
if not code:
|
||||
return Response(
|
||||
{"error": "Code is required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
slack_response = slack_oauth(code=code)
|
||||
|
||||
workspace_integration = WorkspaceIntegration.objects.get(
workspace__slug=slug, pk=workspace_integration_id
)
|
||||
slack_project_sync = SlackProjectSync.objects.create(
|
||||
access_token=slack_response.get("access_token"),
|
||||
scopes=slack_response.get("scope"),
|
||||
bot_user_id=slack_response.get("bot_user_id"),
|
||||
webhook_url=slack_response.get("incoming_webhook", {}).get(
|
||||
"url"
|
||||
),
|
||||
data=slack_response,
|
||||
team_id=slack_response.get("team", {}).get("id"),
|
||||
team_name=slack_response.get("team", {}).get("name"),
|
||||
workspace_integration=workspace_integration,
|
||||
project_id=project_id,
|
||||
)
|
||||
_ = ProjectMember.objects.get_or_create(
|
||||
member=workspace_integration.actor,
|
||||
role=20,
|
||||
project_id=project_id,
|
||||
)
|
||||
serializer = SlackProjectSyncSerializer(slack_project_sync)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
except IntegrityError as e:
|
||||
if "already exists" in str(e):
|
||||
return Response(
|
||||
{"error": "Slack is already installed for the project"},
|
||||
status=status.HTTP_410_GONE,
|
||||
)
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{
|
||||
"error": "Slack could not be installed. Please try again later"
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
@@ -19,12 +19,11 @@ from django.views.decorators.gzip import gzip_page
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
|
||||
from plane.app.permissions import (
|
||||
ProjectEntityPermission,
|
||||
)
|
||||
# Module imports
|
||||
from .. import BaseViewSet
|
||||
from plane.app.serializers import (
|
||||
IssueFlatSerializer,
|
||||
IssueSerializer,
||||
IssueDetailSerializer,
|
||||
)
|
||||
from plane.bgtasks.issue_activities_task import issue_activity
|
||||
@@ -49,9 +48,6 @@ from plane.utils.paginator import (
|
||||
from plane.app.permissions import allow_permission, ROLE
|
||||
from plane.utils.error_codes import ERROR_CODES
|
||||
|
||||
# Module imports
|
||||
from .. import BaseViewSet, BaseAPIView
|
||||
|
||||
|
||||
class IssueArchiveViewSet(BaseViewSet):
|
||||
serializer_class = IssueFlatSerializer
|
||||
@@ -267,7 +263,10 @@ class IssueArchiveViewSet(BaseViewSet):
|
||||
if issue.state.group not in ["completed", "cancelled"]:
|
||||
return Response(
|
||||
{
|
||||
"error": "Can only archive completed or cancelled state group issue"
|
||||
"error_code": ERROR_CODES[
|
||||
"INVALID_ARCHIVE_STATE_GROUP"
|
||||
],
|
||||
"error_message": "INVALID_ARCHIVE_STATE_GROUP",
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
@@ -321,61 +320,3 @@ class IssueArchiveViewSet(BaseViewSet):
|
||||
issue.save()
|
||||
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
class BulkArchiveIssuesEndpoint(BaseAPIView):
|
||||
permission_classes = [
|
||||
ProjectEntityPermission,
|
||||
]
|
||||
|
||||
@allow_permission([ROLE.ADMIN, ROLE.MEMBER])
|
||||
def post(self, request, slug, project_id):
|
||||
issue_ids = request.data.get("issue_ids", [])
|
||||
|
||||
if not len(issue_ids):
|
||||
return Response(
|
||||
{"error": "Issue IDs are required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
issues = Issue.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id, pk__in=issue_ids
|
||||
).select_related("state")
|
||||
bulk_archive_issues = []
|
||||
for issue in issues:
|
||||
if issue.state.group not in ["completed", "cancelled"]:
|
||||
return Response(
|
||||
{
|
||||
"error_code": ERROR_CODES[
|
||||
"INVALID_ARCHIVE_STATE_GROUP"
|
||||
],
|
||||
"error_message": "INVALID_ARCHIVE_STATE_GROUP",
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
issue_activity.delay(
|
||||
type="issue.activity.updated",
|
||||
requested_data=json.dumps(
|
||||
{
|
||||
"archived_at": str(timezone.now().date()),
|
||||
"automation": False,
|
||||
}
|
||||
),
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=str(issue.id),
|
||||
project_id=str(project_id),
|
||||
current_instance=json.dumps(
|
||||
IssueSerializer(issue).data, cls=DjangoJSONEncoder
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
)
|
||||
issue.archived_at = timezone.now().date()
|
||||
bulk_archive_issues.append(issue)
|
||||
Issue.objects.bulk_update(bulk_archive_issues, ["archived_at"])
|
||||
|
||||
return Response(
|
||||
{"archived_at": str(timezone.now().date())},
|
||||
status=status.HTTP_200_OK,
|
||||
)
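
A hypothetical client call for the endpoint above; the path matches the bulk-archive-issues urlpattern earlier in this diff, while host and prefix are assumptions:

import requests

resp = requests.post(
    "https://plane.example.com/api/workspaces/my-team/projects/<project-id>/bulk-archive-issues/",
    json={"issue_ids": ["<issue-id-1>", "<issue-id-2>"]},
    headers={"Authorization": "Bearer <api-token>"},
)
# 200 with {"archived_at": ...} only if every issue is in a completed or cancelled state group.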
|
||||
|
||||
@@ -14,8 +14,8 @@ from rest_framework.parsers import MultiPartParser, FormParser
|
||||
from .. import BaseAPIView
|
||||
from plane.app.serializers import IssueAttachmentSerializer
|
||||
from plane.db.models import IssueAttachment
|
||||
from plane.bgtasks.issue_activities_task import issue_activity
|
||||
from plane.app.permissions import allow_permission, ROLE
||||
|
||||
|
||||
class IssueAttachmentEndpoint(BaseAPIView):
|
||||
|
||||
@@ -168,6 +168,7 @@ class IssueListEndpoint(BaseAPIView):
|
||||
"is_draft",
|
||||
"archived_at",
|
||||
"deleted_at",
|
||||
"type_id",
|
||||
)
|
||||
datetime_fields = ["created_at", "updated_at"]
|
||||
issues = user_timezone_converter(
|
||||
@@ -418,6 +419,7 @@ class IssueViewSet(BaseViewSet):
|
||||
"is_draft",
|
||||
"archived_at",
|
||||
"deleted_at",
|
||||
"type_id",
|
||||
)
|
||||
.first()
|
||||
)
|
||||
|
||||
@@ -269,6 +269,7 @@ class IssueDraftViewSet(BaseViewSet):
|
||||
"link_count",
|
||||
"is_draft",
|
||||
"archived_at",
|
||||
"type_id",
|
||||
)
|
||||
.first()
|
||||
)
|
||||
|
||||
@@ -143,6 +143,7 @@ class IssueRelationViewSet(BaseViewSet):
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"relation_type",
|
||||
"type_id",
|
||||
]
|
||||
|
||||
response_data = {
|
||||
|
||||
@@ -133,6 +133,7 @@ class SubIssuesEndpoint(BaseAPIView):
|
||||
"link_count",
|
||||
"is_draft",
|
||||
"archived_at",
|
||||
"type_id",
|
||||
)
|
||||
datetime_fields = ["created_at", "updated_at"]
|
||||
sub_issues = user_timezone_converter(
|
||||
|
||||
@@ -123,3 +123,4 @@ class IssueSubscriberViewSet(BaseViewSet):
|
||||
return Response(
|
||||
{"subscribed": issue_subscriber}, status=status.HTTP_200_OK
|
||||
)
|
||||
|
||||
|
||||
@@ -32,6 +32,7 @@ from plane.db.models import (
|
||||
UserFavorite,
|
||||
ProjectMember,
|
||||
ProjectPage,
|
||||
DeployBoard,
|
||||
)
|
||||
from plane.utils.error_codes import ERROR_CODES
|
||||
# Module imports
|
||||
@@ -117,6 +118,13 @@ class PageViewSet(BaseViewSet):
|
||||
),
|
||||
)
|
||||
.filter(project=True)
|
||||
.annotate(
|
||||
anchor=DeployBoard.objects.filter(
|
||||
entity_name="page",
|
||||
entity_identifier=OuterRef("pk"),
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
).values("anchor")
|
||||
)
|
||||
.distinct()
|
||||
)
|
||||
|
||||
@@ -388,6 +396,12 @@ class PageViewSet(BaseViewSet):
|
||||
entity_identifier=pk,
|
||||
entity_type="page",
|
||||
).delete()
|
||||
# Delete the deploy board
|
||||
DeployBoard.objects.filter(
|
||||
entity_name="page",
|
||||
entity_identifier=pk,
|
||||
workspace__slug=slug,
|
||||
).delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
|
||||
@@ -54,6 +54,16 @@ from plane.utils.cache import cache_response
|
||||
from plane.bgtasks.webhook_task import model_activity
|
||||
from plane.bgtasks.recent_visited_task import recent_visited_task
|
||||
|
||||
# EE imports
|
||||
from plane.ee.models import ProjectState, ProjectAttribute
|
||||
from plane.ee.utils.workspace_feature import (
|
||||
WorkspaceFeatureContext,
|
||||
check_workspace_feature,
|
||||
)
|
||||
from plane.ee.serializers.app.project import ProjectAttributeSerializer
|
||||
from plane.payment.flags.flag_decorator import check_workspace_feature_flag
|
||||
from plane.payment.flags.flag import FeatureFlag
|
||||
|
||||
|
||||
class ProjectViewSet(BaseViewSet):
|
||||
serializer_class = ProjectListSerializer
|
||||
@@ -67,6 +77,14 @@ class ProjectViewSet(BaseViewSet):
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
is_active=True,
|
||||
).values("sort_order")
|
||||
|
||||
# EE: project_grouping starts
|
||||
state_id = ProjectAttribute.objects.filter(
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
project_id=OuterRef("pk"),
|
||||
).values("state_id")
|
||||
# EE: project_grouping ends
|
||||
|
||||
return self.filter_queryset(
|
||||
super()
|
||||
.get_queryset()
|
||||
@@ -141,6 +159,27 @@ class ProjectViewSet(BaseViewSet):
|
||||
).values("anchor")
|
||||
)
|
||||
.annotate(sort_order=Subquery(sort_order))
|
||||
# EE: project_grouping starts
|
||||
.annotate(state_id=Subquery(state_id))
|
||||
.annotate(
|
||||
priority=ProjectAttribute.objects.filter(
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
project_id=OuterRef("pk"),
|
||||
).values("priority")
|
||||
)
|
||||
.annotate(
|
||||
start_date=ProjectAttribute.objects.filter(
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
project_id=OuterRef("pk"),
|
||||
).values("start_date")
|
||||
)
|
||||
.annotate(
|
||||
target_date=ProjectAttribute.objects.filter(
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
project_id=OuterRef("pk"),
|
||||
).values("target_date")
|
||||
)
|
||||
# EE: project_grouping ends
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"project_projectmember",
|
||||
@@ -364,6 +403,42 @@ class ProjectViewSet(BaseViewSet):
|
||||
]
|
||||
)
|
||||
|
||||
# validating the PROJECT_GROUPING feature flag is enabled
|
||||
if check_workspace_feature_flag(
|
||||
feature_key=FeatureFlag.PROJECT_GROUPING,
|
||||
slug=slug,
|
||||
user_id=str(request.user.id),
|
||||
default_value=False,
|
||||
):
|
||||
# validating the is_project_grouping_enabled workspace feature is enabled
|
||||
if check_workspace_feature(
|
||||
slug,
|
||||
WorkspaceFeatureContext.IS_PROJECT_GROUPING_ENABLED,
|
||||
):
|
||||
state_id = request.data.get("state_id", None)
|
||||
priority = request.data.get("priority", "none")
|
||||
start_date = request.data.get("start_date", None)
|
||||
target_date = request.data.get("target_date", None)
|
||||
|
||||
if state_id is None:
|
||||
state_id = (
|
||||
ProjectState.objects.filter(
|
||||
workspace=workspace, default=True
|
||||
)
|
||||
.values_list("id", flat=True)
|
||||
.first()
|
||||
)
|
||||
|
||||
# also create project attributes
|
||||
_ = ProjectAttribute.objects.create(
|
||||
project_id=serializer.data.get("id"),
|
||||
state_id=state_id,
|
||||
priority=priority,
|
||||
start_date=start_date,
|
||||
target_date=target_date,
|
||||
workspace_id=workspace.id,
|
||||
)
|
||||
|
||||
project = (
|
||||
self.get_queryset()
|
||||
.filter(pk=serializer.data["id"])
|
||||
@@ -447,6 +522,34 @@ class ProjectViewSet(BaseViewSet):
|
||||
is_triage=True,
|
||||
)
|
||||
|
||||
# EE: project_grouping starts
|
||||
# validating the PROJECT_GROUPING feature flag is enabled
|
||||
if check_workspace_feature_flag(
|
||||
feature_key=FeatureFlag.PROJECT_GROUPING,
|
||||
slug=slug,
|
||||
user_id=str(request.user.id),
|
||||
default_value=False,
|
||||
):
|
||||
# validating the is_project_grouping_enabled workspace feature is enabled
|
||||
if check_workspace_feature(
|
||||
slug,
|
||||
WorkspaceFeatureContext.IS_PROJECT_GROUPING_ENABLED,
|
||||
):
|
||||
project_attribute = ProjectAttribute.objects.filter(
|
||||
project_id=project.id
|
||||
).first()
|
||||
if project_attribute is not None:
|
||||
project_attribute_serializer = (
|
||||
ProjectAttributeSerializer(
|
||||
project_attribute,
|
||||
data=request.data,
|
||||
partial=True,
|
||||
)
|
||||
)
|
||||
if project_attribute_serializer.is_valid():
|
||||
project_attribute_serializer.save()
|
||||
# EE: project_grouping ends
|
||||
|
||||
project = (
|
||||
self.get_queryset()
|
||||
.filter(pk=serializer.data["id"])
|
||||
@@ -485,17 +588,34 @@ class ProjectViewSet(BaseViewSet):
|
||||
status=status.HTTP_410_GONE,
|
||||
)
|
||||
|
||||
def destroy(self, request, slug, pk):
|
||||
project = Project.objects.get(pk=pk)
|
||||
project.delete()
|
||||
|
||||
# Delete the deploy boards
|
||||
DeployBoard.objects.filter(
|
||||
project_id=pk,
|
||||
workspace__slug=slug,
|
||||
).delete()
|
||||
|
||||
# Delete the user favorite
|
||||
UserFavorite.objects.filter(
|
||||
project_id=pk,
|
||||
workspace__slug=slug,
|
||||
).delete()
|
||||
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
class ProjectArchiveUnarchiveEndpoint(BaseAPIView):
|
||||
|
||||
@allow_permission([ROLE.ADMIN, ROLE.MEMBER])
|
||||
def post(self, request, slug, project_id):
|
||||
project = Project.objects.get(pk=project_id, workspace__slug=slug)
|
||||
project.archived_at = timezone.now()
|
||||
project.save()
|
||||
UserFavorite.objects.filter(
|
||||
project_id=project_id,
|
||||
workspace__slug=slug,
|
|
||||
).delete()
|
||||
return Response(
|
||||
{"archived_at": str(project.archived_at)},
|
||||
@@ -663,7 +783,9 @@ class ProjectPublicCoverImagesEndpoint(BaseAPIView):
|
||||
# Extracting file keys from the response
|
||||
if "Contents" in response:
|
||||
for content in response["Contents"]:
|
||||
if not content["Key"].endswith(
|
||||
if not content[
|
||||
"Key"
|
||||
].endswith(
|
||||
"/"
|
||||
): # This line ensures we're only getting files, not "sub-folders"
|
||||
files.append(
|
||||
|
||||
@@ -27,6 +27,7 @@ from plane.db.models import (
|
||||
WorkspaceMember,
|
||||
IssueUserProperty,
|
||||
)
|
||||
from plane.payment.bgtasks.member_sync_task import member_sync_task
|
||||
|
||||
|
||||
class ProjectInvitationsViewset(BaseViewSet):
|
||||
@@ -247,6 +248,9 @@ class ProjectJoinEndpoint(BaseAPIView):
|
||||
workspace_member.is_active = True
|
||||
workspace_member.save()
|
||||
|
||||
# Sync workspace members
|
||||
member_sync_task.delay(slug)
|
||||
|
||||
# Check if the user was already a member of project then activate the user
|
||||
project_member = ProjectMember.objects.filter(
|
||||
workspace_id=project_invite.workspace_id, member=user
|
||||
|
||||
@@ -22,6 +22,7 @@ from plane.db.models import (
|
||||
Page,
|
||||
IssueView,
|
||||
ProjectPage,
|
||||
ProjectMember,
|
||||
)
|
||||
|
||||
|
||||
@@ -90,6 +91,7 @@ class GlobalSearchEndpoint(BaseAPIView):
|
||||
"project__identifier",
|
||||
"project_id",
|
||||
"workspace__slug",
|
||||
"type_id",
|
||||
)
|
||||
|
||||
def filter_cycles(self, query, slug, project_id, workspace_search):
|
||||
@@ -260,3 +262,202 @@ class GlobalSearchEndpoint(BaseAPIView):
|
||||
func = MODELS_MAPPER.get(model, None)
|
||||
results[model] = func(query, slug, project_id, workspace_search)
|
||||
return Response({"results": results}, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
class SearchEndpoint(BaseAPIView):
|
||||
def get(self, request, slug, project_id):
|
||||
query = request.query_params.get("query", False)
|
||||
query_type = request.query_params.get("query_type", "issue")
|
||||
count = int(request.query_params.get("count", 5))
|
||||
|
||||
if query_type == "mention":
|
||||
fields = ["member__first_name", "member__last_name"]
|
||||
q = Q()
|
||||
|
||||
if query:
|
||||
for field in fields:
|
||||
q |= Q(**{f"{field}__icontains": query})
|
||||
users = (
|
||||
ProjectMember.objects.filter(
|
||||
q,
|
||||
project__project_projectmember__member=self.request.user,
|
||||
project__project_projectmember__is_active=True,
|
||||
project_id=project_id,
|
||||
workspace__slug=slug,
|
||||
)
|
||||
.order_by("-created_at")
|
||||
.values(
|
||||
"member__first_name",
|
||||
"member__last_name",
|
||||
"member__avatar",
|
||||
"member__display_name",
|
||||
"member__id",
|
||||
)[:count]
|
||||
)
|
||||
|
||||
fields = ["name"]
|
||||
q = Q()
|
||||
|
||||
if query:
|
||||
for field in fields:
|
||||
q |= Q(**{f"{field}__icontains": query})
|
||||
|
||||
pages = (
|
||||
Page.objects.filter(
|
||||
q,
|
||||
project__project_projectmember__member=self.request.user,
|
||||
project__project_projectmember__is_active=True,
|
||||
workspace__slug=slug,
|
||||
access=0,
|
||||
)
|
||||
.order_by("-created_at")
|
||||
.values("name", "id")[:count]
|
||||
)
|
||||
return Response(
|
||||
{"users": users, "pages": pages}, status=status.HTTP_200_OK
|
||||
)
|
||||
|
||||
if query_type == "project":
|
||||
fields = ["name", "identifier"]
|
||||
q = Q()
|
||||
|
||||
if query:
|
||||
for field in fields:
|
||||
q |= Q(**{f"{field}__icontains": query})
|
||||
projects = (
|
||||
Project.objects.filter(
|
||||
q,
|
||||
Q(project_projectmember__member=self.request.user)
|
||||
| Q(network=2),
|
||||
workspace__slug=slug,
|
||||
)
|
||||
.order_by("-created_at")
|
||||
.distinct()
|
||||
.values("name", "id", "identifier", "workspace__slug")[:count]
|
||||
)
|
||||
return Response(projects, status=status.HTTP_200_OK)
|
||||
|
||||
if query_type == "issue":
|
||||
fields = ["name", "sequence_id", "project__identifier"]
|
||||
q = Q()
|
||||
|
||||
if query:
|
||||
for field in fields:
|
||||
if field == "sequence_id":
|
||||
# Match whole integers only (exclude decimal numbers)
|
||||
sequences = re.findall(r"\b\d+\b", query)
|
||||
for sequence_id in sequences:
|
||||
q |= Q(**{"sequence_id": sequence_id})
|
||||
else:
|
||||
q |= Q(**{f"{field}__icontains": query})
|
||||
|
||||
issues = (
|
||||
Issue.issue_objects.filter(
|
||||
q,
|
||||
project__project_projectmember__member=self.request.user,
|
||||
project__project_projectmember__is_active=True,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
.order_by("-created_at")
|
||||
.distinct()
|
||||
.values(
|
||||
"name",
|
||||
"id",
|
||||
"sequence_id",
|
||||
"project__identifier",
|
||||
"project_id",
|
||||
"priority",
|
||||
"state_id",
|
||||
"type_id",
|
||||
)[:count]
|
||||
)
|
||||
return Response(issues, status=status.HTTP_200_OK)
|
||||
|
||||
if query_type == "cycle":
|
||||
fields = ["name"]
|
||||
q = Q()
|
||||
|
||||
if query:
|
||||
for field in fields:
|
||||
q |= Q(**{f"{field}__icontains": query})
|
||||
|
||||
cycles = (
|
||||
Cycle.objects.filter(
|
||||
q,
|
||||
project__project_projectmember__member=self.request.user,
|
||||
project__project_projectmember__is_active=True,
|
||||
workspace__slug=slug,
|
||||
)
|
||||
.order_by("-created_at")
|
||||
.distinct()
|
||||
.values(
|
||||
"name",
|
||||
"id",
|
||||
"project_id",
|
||||
"project__identifier",
|
||||
"workspace__slug",
|
||||
)[:count]
|
||||
)
|
||||
return Response(cycles, status=status.HTTP_200_OK)
|
||||
|
||||
if query_type == "module":
|
||||
fields = ["name"]
|
||||
q = Q()
|
||||
|
||||
if query:
|
||||
for field in fields:
|
||||
q |= Q(**{f"{field}__icontains": query})
|
||||
|
||||
modules = (
|
||||
Module.objects.filter(
|
||||
q,
|
||||
project__project_projectmember__member=self.request.user,
|
||||
project__project_projectmember__is_active=True,
|
||||
workspace__slug=slug,
|
||||
)
|
||||
.order_by("-created_at")
|
||||
.distinct()
|
||||
.values(
|
||||
"name",
|
||||
"id",
|
||||
"project_id",
|
||||
"project__identifier",
|
||||
"workspace__slug",
|
||||
)[:count]
|
||||
)
|
||||
return Response(modules, status=status.HTTP_200_OK)
|
||||
|
||||
if query_type == "page":
|
||||
fields = ["name"]
|
||||
q = Q()
|
||||
|
||||
if query:
|
||||
for field in fields:
|
||||
q |= Q(**{f"{field}__icontains": query})
|
||||
|
||||
pages = (
|
||||
Page.objects.filter(
|
||||
q,
|
||||
projects__project_projectmember__member=self.request.user,
|
||||
projects__project_projectmember__is_active=True,
|
||||
projects__id=project_id,
|
||||
workspace__slug=slug,
|
||||
access=0,
|
||||
)
|
||||
.order_by("-created_at")
|
||||
.distinct()
|
||||
.values(
|
||||
"name",
|
||||
"id",
|
||||
"projects__id",
|
||||
"project__identifier",
|
||||
"workspace__slug",
|
||||
)[:count]
|
||||
)
|
||||
return Response(pages, status=status.HTTP_200_OK)
|
||||
|
||||
return Response(
|
||||
{"error": "Please provide a valid query"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
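
A hypothetical request against the project-scoped SearchEndpoint above; the path comes from the search urlpattern added earlier in this diff, while the host and prefix are assumptions:

import requests

resp = requests.get(
    "https://plane.example.com/api/workspaces/my-team/projects/<project-id>/search/",
    params={"query": "login bug", "query_type": "issue", "count": 5},
    headers={"Authorization": "Bearer <api-token>"},
)
print(resp.json())  # at most `count` issues, each with name, sequence_id, state_id, ...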
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
# Python imports
|
||||
|
||||
# Django imports
|
||||
from django.db.models import Q
|
||||
|
||||
@@ -97,6 +95,7 @@ class IssueSearchEndpoint(BaseAPIView):
|
||||
"state__name",
|
||||
"state__group",
|
||||
"state__color",
|
||||
"type_id",
|
||||
),
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
139
apiserver/plane/app/views/search/workspace.py
Normal file
@@ -0,0 +1,139 @@
# Python imports
import re

# Django imports
from django.db.models import Q

# Third party imports
from rest_framework import status
from rest_framework.response import Response

# Module imports
from plane.app.views import BaseAPIView
from plane.db.models import Workspace, Page, Issue
from plane.app.permissions import WorkspaceEntityPermission


class WorkspaceSearchEndpoint(BaseAPIView):
    """Endpoint to search across multiple fields in the workspace and
    also show related workspace if found
    """

    permission_classes = [
        WorkspaceEntityPermission,
    ]

    def filter_workspaces(self, query, slug):
        """Filter workspaces based on the query"""
        fields = ["name"]
        q = Q()
        for field in fields:
            q |= Q(**{f"{field}__icontains": query})
        return (
            Workspace.objects.filter(
                q, workspace_member__member=self.request.user
            )
            .distinct()
            .values("name", "id", "slug")
        )

    def filter_pages(self, query, slug):
        """Filter pages based on the query"""
        fields = ["name"]
        q = Q()
        for field in fields:
            q |= Q(**{f"{field}__icontains": query})
        return (
            Page.objects.filter(
                q,
                workspace__slug=slug,
                archived_at__isnull=True,
                is_global=True,
            )
            .filter(
                Q(
                    owned_by=self.request.user,
                )
                | Q(access=0)
            )
            .distinct()
            .values("name", "id", "workspace__slug")
        )

    def get(self, request, slug):
        query = request.GET.get("search", False)
        if not query:
            return Response(
                {"error": "Search query is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        MODELS_MAPPER = {
            "workspace": self.filter_workspaces,
            "page": self.filter_pages,
        }

        results = {}

        for model in MODELS_MAPPER.keys():
            func = MODELS_MAPPER.get(model, None)
            results[model] = func(query, slug)
        return Response({"results": results}, status=status.HTTP_200_OK)


class WorkspaceEntitySearchEndpoint(BaseAPIView):

    permission_classes = [
        WorkspaceEntityPermission,
    ]

    def filter_issues(self, slug, query, count):
        """Filter issues based on the query"""
        fields = ["name", "sequence_id", "project__identifier"]
        q = Q()

        if query:
            for field in fields:
                if field == "sequence_id":
                    # Match whole integers only (exclude decimal numbers)
                    sequences = re.findall(r"\b\d+\b", query)
                    for sequence_id in sequences:
                        q |= Q(**{"sequence_id": sequence_id})
                else:
                    q |= Q(**{f"{field}__icontains": query})

        issues = (
            Issue.issue_objects.filter(
                q,
                project__project_projectmember__member=self.request.user,
                project__project_projectmember__is_active=True,
                workspace__slug=slug,
            )
            .order_by("-created_at")
            .distinct()
            .values(
                "name",
                "id",
                "sequence_id",
                "project__identifier",
                "project_id",
                "priority",
                "state_id",
                "type_id",
            )[:count]
        )

        return issues

    def get(self, request, slug):
        query = request.query_params.get("query", False)
        query_type = request.query_params.get("query_type", "issue")
        count = int(request.query_params.get("count", 5))

        MODELS_MAPPER = {
            "issue": self.filter_issues,
        }

        func = MODELS_MAPPER.get(query_type, None)
        results = func(slug, query, count)
        return Response(results, status=status.HTTP_200_OK)
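
# Editor's sketch (not part of this diff): the Q-object pattern both search
# endpoints above rely on. Text fields contribute OR'ed case-insensitive
# containment filters, while whole integers in the query are matched exactly
# against sequence_id (the same regex the endpoint uses).
import re
from django.db.models import Q

def build_search_filter(query, fields=("name", "project__identifier")):
    q = Q()
    for field in fields:
        q |= Q(**{f"{field}__icontains": query})
    for sequence_id in re.findall(r"\b\d+\b", query):
        q |= Q(sequence_id=sequence_id)
    return q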

@@ -40,6 +40,8 @@ from plane.utils.host import base_host
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_control
from django.views.decorators.vary import vary_on_cookie
from plane.payment.bgtasks.member_sync_task import member_sync_task


class UserEndpoint(BaseViewSet):
@@ -171,6 +173,12 @@ class UserEndpoint(BaseViewSet):
            workspaces_to_deactivate, ["is_active"], batch_size=100
        )

        # Sync workspace members
        [
            member_sync_task.delay(workspace.workspace.slug)
            for workspace in workspaces_to_deactivate
        ]

        # Delete all workspace invites
        WorkspaceMemberInvite.objects.filter(
            email=user.email,
@@ -35,6 +35,7 @@ from plane.db.models import (
    Workspace,
    WorkspaceMember,
    ProjectMember,
    DeployBoard,
)
from plane.utils.grouper import (
    issue_group_values,
@@ -417,6 +418,14 @@ class IssueViewViewSet(BaseViewSet):
            .select_related("project")
            .select_related("workspace")
            .annotate(is_favorite=Exists(subquery))
            .annotate(
                anchor=DeployBoard.objects.filter(
                    entity_name="view",
                    entity_identifier=OuterRef("pk"),
                    project_id=self.kwargs.get("project_id"),
                    workspace__slug=self.kwargs.get("slug"),
                ).values("anchor")
            )
            .order_by("-is_favorite", "name")
            .distinct()
        )
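
# Editor's sketch (not part of this diff): the anchor annotation above is a
# correlated subquery -- OuterRef("pk") binds each view's primary key into
# the DeployBoard filter, so every row is annotated with its own anchor (or
# NULL when no deploy board exists). Generic shape, with hypothetical models:
from django.db.models import OuterRef, Subquery

# parents = Parent.objects.annotate(
#     anchor=Subquery(
#         Child.objects.filter(parent=OuterRef("pk")).values("anchor")[:1]
#     )
# )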
@@ -526,6 +535,13 @@ class IssueViewViewSet(BaseViewSet):
                entity_identifier=pk,
                entity_type="view",
            ).delete()
            # Delete the view from the deploy board
            DeployBoard.objects.filter(
                entity_name="view",
                entity_identifier=pk,
                project_id=project_id,
                workspace__slug=slug,
            ).delete()
        else:
            return Response(
                {"error": "Only admin or owner can delete the view"},
@@ -2,8 +2,10 @@
import csv
import io
from datetime import date

import requests
from dateutil.relativedelta import relativedelta

# Django imports
from django.db import IntegrityError
from django.db.models import (
    Count,
@@ -15,8 +17,6 @@ from django.db.models import (
)
from django.db.models.fields import DateField
from django.db.models.functions import Cast, ExtractDay, ExtractWeek

# Django imports
from django.http import HttpResponse
from django.utils import timezone

@@ -48,6 +48,8 @@ from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_control
from django.views.decorators.vary import vary_on_cookie
from plane.utils.constants import RESTRICTED_WORKSPACE_SLUGS
from plane.payment.bgtasks.member_sync_task import member_sync_task
from django.conf import settings


class WorkSpaceViewSet(BaseViewSet):
@@ -131,6 +133,9 @@ class WorkSpaceViewSet(BaseViewSet):
                role=20,
                company_role=request.data.get("company_role", ""),
            )

            # Sync workspace members
            member_sync_task.delay(slug)
            return Response(
                serializer.data, status=status.HTTP_201_CREATED
            )
@@ -163,6 +168,33 @@ class WorkSpaceViewSet(BaseViewSet):
        path="/api/users/me/settings/", multiple=True, user=False
    )
    def destroy(self, request, *args, **kwargs):
        # Get the workspace
        workspace = self.get_object()

        # Fetch the workspace subscription
        if settings.PAYMENT_SERVER_BASE_URL:
            # Check the workspace subscription with the payment server
            response = requests.post(
                f"{settings.PAYMENT_SERVER_BASE_URL}/api/subscriptions/check/",
                headers={
                    "content-type": "application/json",
                    "x-api-key": settings.PAYMENT_SERVER_AUTH_TOKEN,
                },
                json={"workspace_id": str(workspace.id)},
            )
            # Check if the response is successful
            response.raise_for_status()
            # Return the response
            response = response.json()
            # Check if the response contains the product key
            if response.get("subscription_exists"):
                return Response(
                    {"error": "workspace has active subscription"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            else:
                # Delete the workspace
                return super().destroy(request, *args, **kwargs)
        return super().destroy(request, *args, **kwargs)
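
# Editor's sketch (not part of this diff): the subscription check above,
# isolated as a helper with an explicit timeout. Endpoint and header names
# mirror the diff; the timeout value is an assumption.
import requests

def workspace_has_subscription(base_url, api_key, workspace_id):
    response = requests.post(
        f"{base_url}/api/subscriptions/check/",
        headers={"content-type": "application/json", "x-api-key": api_key},
        json={"workspace_id": str(workspace_id)},
        timeout=10,  # assumed; the diff sends the request without a timeout
    )
    response.raise_for_status()
    return response.json().get("subscription_exists", False)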

@@ -384,7 +416,6 @@ class ExportWorkspaceUserActivityEndpoint(BaseAPIView):
        return csv_buffer

    def post(self, request, slug, user_id):
        if not request.data.get("date"):
            return Response(
                {"error": "Date is required"},

@@ -13,7 +13,6 @@ from plane.app.permissions import allow_permission, ROLE


class WorkspaceFavoriteEndpoint(BaseAPIView):

    @allow_permission(
        allowed_roles=[ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE"
    )
@@ -76,7 +75,6 @@ class WorkspaceFavoriteEndpoint(BaseAPIView):


class WorkspaceFavoriteGroupEndpoint(BaseAPIView):

    @allow_permission(
        allowed_roles=[ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE"
    )
@@ -1,6 +1,6 @@
# Python imports
from datetime import datetime

import uuid
import jwt

# Django imports
@@ -22,7 +22,7 @@ from plane.app.serializers import (
    WorkSpaceMemberSerializer,
)
from plane.app.views.base import BaseAPIView
from plane.bgtasks.event_tracking_task import workspace_invite_event
from plane.bgtasks.event_tracking_task import track_event
from plane.bgtasks.workspace_invitation_task import workspace_invitation
from plane.db.models import (
    User,
@@ -31,7 +31,7 @@ from plane.db.models import (
    WorkspaceMemberInvite,
)
from plane.utils.cache import invalidate_cache, invalidate_cache_directly

from plane.payment.bgtasks.member_sync_task import member_sync_task
from .. import BaseViewSet


@@ -103,6 +103,33 @@ class WorkspaceInvitationsViewset(BaseViewSet):
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Get current existing workspace invitations where accepted is False
        # TODO: Uncomment this code block after implementing the workspace_member_check function
        # workspace_invitations = (
        #     WorkspaceMemberInvite.objects.filter(
        #         workspace_id=workspace.id,
        #     )
        #     .annotate(
        #         user_email=F("email"), user_id=F("id"), user_role=F("role")
        #     )
        #     .values("user_email", "user_id", "user_role")
        # )

        # # Check the invite flow
        # allowed_status, allowed_admin_members, allowed_guest_viewers = (
        #     workspace_member_check(workspace_invitations, emails, slug)
        # )

        # if not allowed_status:
        #     return Response(
        #         {
        #             "error": "You cannot invite more users than the allowed limit",
        #             "allowed_admin_members": allowed_admin_members,
        #             "allowed_guest_viewers": allowed_guest_viewers,
        #         },
        #         status=status.HTTP_400_BAD_REQUEST,
        #     )

        workspace_invitations = []
        for email in emails:
            try:
@@ -154,6 +181,61 @@ class WorkspaceInvitationsViewset(BaseViewSet):
            status=status.HTTP_200_OK,
        )

    # TODO: Uncomment this code block after implementing the workspace_member_check function
    # def partial_update(self, request, slug, pk):
    #     workspace_invitations = (
    #         WorkspaceMemberInvite.objects.filter(
    #             ~Q(id=pk),
    #             workspace__slug=slug,
    #         )
    #         .annotate(
    #             user_email=F("email"), user_id=F("id"), user_role=F("role")
    #         )
    #         .values("user_email", "user_id", "user_role")
    #     )

    #     workspace_invitation = WorkspaceMemberInvite.objects.get(
    #         pk=pk, workspace__slug=slug
    #     )

    #     # Check the invite flow
    #     allowed_status, allowed_admin_members, allowed_guest_viewers = (
    #         workspace_member_check(
    #             workspace_invitations,
    #             [
    #                 {
    #                     "email": workspace_invitation.email,
    #                     "role": request.data.get(
    #                         "role", workspace_invitation.role
    #                     ),
    #                 }
    #             ],
    #             slug,
    #         )
    #     )

    #     if not allowed_status:
    #         return Response(
    #             {
    #                 "error": "You cannot invite more users than the allowed limit",
    #                 "allowed_admin_members": allowed_admin_members,
    #                 "allowed_guest_viewers": allowed_guest_viewers,
    #             },
    #             status=status.HTTP_400_BAD_REQUEST,
    #         )

    #     if not status:
    #         return Response(
    #             {
    #                 "error": "You cannot invite more users than the allowed limit",
    #                 "allowed_admin_members": allowed_admin_members,
    #                 "allowed_guest_viewers": allowed_guest_viewers,
    #             },
    #             status=status.HTTP_400_BAD_REQUEST,
    #         )

    #     return super().partial_update(request, slug, pk)

    def destroy(self, request, slug, pk):
        workspace_member_invite = WorkspaceMemberInvite.objects.get(
            pk=pk, workspace__slug=slug
@@ -227,15 +309,25 @@ class WorkspaceJoinEndpoint(BaseAPIView):
            workspace_invite.delete()

            # Send event
            workspace_invite_event.delay(
                user=user.id if user is not None else None,
            track_event.delay(
                email=email,
                user_agent=request.META.get("HTTP_USER_AGENT"),
                ip=request.META.get("REMOTE_ADDR"),
                event_name="MEMBER_ACCEPTED",
                accepted_from="EMAIL",
                properties={
                    "event_id": uuid.uuid4().hex,
                    "user": {"email": email, "id": str(user)},
                    "device_ctx": {
                        "ip": request.META.get("REMOTE_ADDR", None),
                        "user_agent": request.META.get(
                            "HTTP_USER_AGENT", None
                        ),
                    },
                    "accepted_from": "EMAIL",
                },
            )

            # sync workspace members
            member_sync_task.delay(slug)

            return Response(
                {"message": "Workspace Invitation Accepted"},
                status=status.HTTP_200_OK,
@@ -308,6 +400,12 @@ class UserWorkspaceInvitationsViewSet(BaseViewSet):
            ignore_conflicts=True,
        )

        # Sync workspace members
        [
            member_sync_task.delay(invitation.workspace.slug)
            for invitation in workspace_invitations
        ]

        # Delete joined workspace invites
        workspace_invitations.delete()
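
# Editor's note (not part of this diff): the list comprehensions used above
# for "Sync workspace members" enqueue one Celery task per workspace and
# discard the resulting list; a plain loop expresses the same fan-out:
# for invitation in workspace_invitations:
#     member_sync_task.delay(invitation.workspace.slug)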

@@ -35,7 +35,7 @@ from plane.db.models import (
    WorkspaceMember,
)
from plane.utils.cache import cache_response, invalidate_cache

from plane.payment.bgtasks.member_sync_task import member_sync_task
from .. import BaseViewSet


@@ -140,12 +140,48 @@ class WorkSpaceMemberViewSet(BaseViewSet):
                status=status.HTTP_400_BAD_REQUEST,
            )

        # TODO: Check if the workspace has reached the maximum limit of admins or guests
        # allowed, allowed_admins, allowed_guests = workspace_member_check(
        #     slug=slug,
        #     current_invite_list=[],
        #     requested_invite_list=[],
        # )

        # if "role" in request.data:
        #     requested_role = int(request.data.get("role"))

        #     if (
        #         requested_role > 10
        #         and allowed_admins is not None
        #         and allowed_admins - 1 < 0
        #     ):
        #         return Response(
        #             {
        #                 "error": "You cannot update the role to admin as the workspace has reached the maximum limit of admins"
        #             },
        #             status=status.HTTP_400_BAD_REQUEST,
        #         )

        #     if (
        #         requested_role <= 10
        #         and allowed_guests is not None
        #         and allowed_guests - 1 < 0
        #     ):
        #         return Response(
        #             {
        #                 "error": "You cannot update the role to guest or viewer as the workspace has reached the maximum limit of guests"
        #             },
        #             status=status.HTTP_400_BAD_REQUEST,
        #         )

        serializer = WorkSpaceMemberSerializer(
            workspace_member, data=request.data, partial=True
        )

        if serializer.is_valid():
            serializer.save()
            # Sync workspace members
            member_sync_task.delay(slug)
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

@@ -221,6 +257,10 @@ class WorkSpaceMemberViewSet(BaseViewSet):

        workspace_member.is_active = False
        workspace_member.save()

        # Sync workspace members
        member_sync_task.delay(slug)

        return Response(status=status.HTTP_204_NO_CONTENT)

    @invalidate_cache(
@@ -288,6 +328,9 @@ class WorkSpaceMemberViewSet(BaseViewSet):
        # # Deactivate the user
        workspace_member.is_active = False
        workspace_member.save()

        # # Sync workspace members
        member_sync_task.delay(slug)
        return Response(status=status.HTTP_204_NO_CONTENT)

@@ -40,6 +40,12 @@ AUTHENTICATION_ERROR_CODES = {
    "GOOGLE_OAUTH_PROVIDER_ERROR": 5115,
    "GITHUB_OAUTH_PROVIDER_ERROR": 5120,
    "GITLAB_OAUTH_PROVIDER_ERROR": 5121,
    # OIDC
    "OIDC_NOT_CONFIGURED": 5190,
    "OIDC_PROVIDER_ERROR": 5195,
    # SAML
    "SAML_NOT_CONFIGURED": 5190,
    "SAML_PROVIDER_ERROR": 5195,
    # Reset Password
    "INVALID_PASSWORD_TOKEN": 5125,
    "EXPIRED_PASSWORD_TOKEN": 5130,
@@ -47,7 +53,7 @@ AUTHENTICATION_ERROR_CODES = {
    "INCORRECT_OLD_PASSWORD": 5135,
    "MISSING_PASSWORD": 5138,
    "INVALID_NEW_PASSWORD": 5140,
    # set passowrd
    # set password
    "PASSWORD_ALREADY_SET": 5145,
    # Admin
    "ADMIN_ALREADY_EXIST": 5150,
@@ -1,5 +1,6 @@
# Python imports
import requests
import os

# Django imports
from django.utils import timezone
@@ -46,6 +47,8 @@ class OauthAdapter(Adapter):
            return "GITHUB_OAUTH_PROVIDER_ERROR"
        elif self.provider == "gitlab":
            return "GITLAB_OAUTH_PROVIDER_ERROR"
        elif self.provider == "oidc":
            return "OIDC_PROVIDER_ERROR"
        else:
            return "OAUTH_NOT_CONFIGURED"

@@ -67,7 +70,10 @@ class OauthAdapter(Adapter):
        try:
            headers = headers or {}
            response = requests.post(
                self.get_token_url(), data=data, headers=headers
                self.get_token_url(),
                data=data,
                headers=headers,
                verify=os.environ.get("SSL_VERIFY", "1") == "1",
            )
            response.raise_for_status()
            return response.json()
@@ -83,7 +89,11 @@ class OauthAdapter(Adapter):
            headers = {
                "Authorization": f"Bearer {self.token_data.get('access_token')}"
            }
            response = requests.get(self.get_user_info_url(), headers=headers)
            response = requests.get(
                self.get_user_info_url(),
                headers=headers,
                verify=os.environ.get("SSL_VERIFY", "1") == "1",
            )
            response.raise_for_status()
            return response.json()
        except requests.RequestException:
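
# Editor's sketch (not part of this diff): the SSL_VERIFY toggle read above,
# factored into a helper. Verification defaults to on; setting SSL_VERIFY to
# anything other than "1" (e.g. "0" for a self-signed IdP) disables it.
import os

def ssl_verify_enabled():
    return os.environ.get("SSL_VERIFY", "1") == "1"

# requests.post(url, data=data, verify=ssl_verify_enabled())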
214
apiserver/plane/authentication/adapter/saml.py
Normal file
@@ -0,0 +1,214 @@
# Python imports
import os

# Django imports
from django.conf import settings

# Third party imports
from onelogin.saml2.auth import OneLogin_Saml2_Auth

# Module imports
from plane.license.utils.instance_value import get_configuration_value
from .base import Adapter
from plane.authentication.adapter.error import (
    AuthenticationException,
    AUTHENTICATION_ERROR_CODES,
)
from plane.utils.exception_logger import log_exception


class SAMLAdapter(Adapter):

    provider = "saml"
    auth = None
    saml_config = {}

    def __init__(
        self,
        request,
    ):
        (
            SAML_ENTITY_ID,
            SAML_SSO_URL,
            SAML_LOGOUT_URL,
            SAML_CERTIFICATE,
        ) = get_configuration_value(
            [
                {
                    "key": "SAML_ENTITY_ID",
                    "default": os.environ.get("SAML_ENTITY_ID"),
                },
                {
                    "key": "SAML_SSO_URL",
                    "default": os.environ.get("SAML_SSO_URL"),
                },
                {
                    "key": "SAML_LOGOUT_URL",
                    "default": os.environ.get("SAML_LOGOUT_URL"),
                },
                {
                    "key": "SAML_CERTIFICATE",
                    "default": os.environ.get("SAML_CERTIFICATE"),
                },
            ]
        )

        if not (SAML_ENTITY_ID and SAML_SSO_URL and SAML_CERTIFICATE):
            raise AuthenticationException(
                error_code=AUTHENTICATION_ERROR_CODES["SAML_NOT_CONFIGURED"],
                error_message="SAML_NOT_CONFIGURED",
            )

        super().__init__(request, self.provider)
        req = self.prepare_saml_request(self.request)
        saml_config = self.generate_saml_configuration(
            request=request,
            entity_id=SAML_ENTITY_ID,
            sso_url=SAML_SSO_URL,
            logout_url=SAML_LOGOUT_URL,
            idp_certificate=SAML_CERTIFICATE,
        )

        # Generate configuration
        self.saml_config = saml_config
        auth = OneLogin_Saml2_Auth(
            req,
            saml_config,
        )
        self.auth = auth

    def generate_saml_configuration(
        self,
        request,
        entity_id,
        sso_url,
        logout_url,
        idp_certificate,
    ):
        return {
            "strict": True,
            "debug": settings.DEBUG,
            "sp": {
                "entityId": f"{request.scheme}://{request.get_host()}/auth/saml/metadata/",
                "assertionConsumerService": {
                    "url": f"{request.scheme}://{request.get_host()}/auth/saml/callback/",
                    "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST",
                },
            },
            "idp": {
                "entityId": entity_id,
                "singleSignOnService": {
                    "url": sso_url,
                    "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect",
                },
                "singleLogoutService": {
                    "url": logout_url,
                    "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect",
                },
                "x509cert": idp_certificate,
            },
            "attributeConsumingService": {
                "serviceName": "Plane SAML",
                "serviceDescription": "Plane SAML",
                "requestedAttributes": [
                    {
                        "name": "first_name",
                        "friendlyName": "user.firstName",
                        "isRequired": False,
                        "nameFormat": "urn:oasis:names:tc:SAML:2.0:attrname-format:basic",
                    },
                    {
                        "name": "last_name",
                        "friendlyName": "user.lastName",
                        "isRequired": False,
                        "nameFormat": "urn:oasis:names:tc:SAML:2.0:attrname-format:basic",
                    },
                    {
                        "name": "email",
                        "friendlyName": "user.email",
                        "isRequired": True,
                        "nameFormat": "urn:oasis:names:tc:SAML:2.0:attrname-format:basic",
                    },
                ],
            },
        }

    def prepare_saml_request(self, request):
        return {
            "https": "on" if request.is_secure() else "off",
            "http_host": request.get_host(),
            "script_name": request.path,
            "get_data": request.GET.copy(),
            "post_data": request.POST.copy(),
        }

    def get_auth_url(self):
        return self.auth.login()

    def authenticate(self):
        self.auth.process_response()
        errors = self.auth.get_errors()
        if errors:
            if not self.auth.is_authenticated():
                # Log the errors
                log_exception(Exception(errors))
                raise AuthenticationException(
                    error_code=AUTHENTICATION_ERROR_CODES[
                        "SAML_PROVIDER_ERROR"
                    ],
                    error_message="SAML_PROVIDER_ERROR",
                )
            # Log the errors
            log_exception(Exception(errors))
            raise AuthenticationException(
                error_code=AUTHENTICATION_ERROR_CODES[
                    "SAML_PROVIDER_ERROR"
                ],
                error_message="SAML_PROVIDER_ERROR",
            )
        attributes = self.auth.get_attributes()

        email = (
            attributes.get("email")[0]
            if attributes.get("email") and len(attributes.get("email"))
            else None
        )

        if not email:
            raise AuthenticationException(
                error_code=AUTHENTICATION_ERROR_CODES[
                    "SAML_PROVIDER_ERROR"
                ],
                error_message="SAML_PROVIDER_ERROR",
            )

        first_name = (
            attributes.get("first_name")[0]
            if attributes.get("first_name")
            and len(attributes.get("first_name"))
            else ""
        )

        last_name = (
            attributes.get("last_name")[0]
            if attributes.get("last_name") and len(attributes.get("last_name"))
            else ""
        )

        super().set_user_data(
            {
                "email": email,
                "user": {
                    "first_name": first_name,
                    "last_name": last_name,
                    "email": email,
                },
            }
        )
        return self.complete_login_or_signup()

    def logout(self):
        try:
            return self.auth.logout()
        except Exception:
            return False
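
# Editor's sketch (not part of this diff): prepare_saml_request() reduces a
# Django request to the plain dict python3-saml expects, so the adapter's
# request shape can be exercised without a real HttpRequest. Host and paths
# below are illustrative placeholders.
from onelogin.saml2.auth import OneLogin_Saml2_Auth

request_data = {
    "https": "on",
    "http_host": "plane.example.com",
    "script_name": "/auth/saml/callback/",
    "get_data": {},
    "post_data": {},
}
# With a config built by generate_saml_configuration(...):
# auth = OneLogin_Saml2_Auth(request_data, saml_config)
# redirect_url = auth.login()  # URL of the IdP's SSO endpoint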

@@ -6,6 +6,9 @@ from urllib.parse import urlencode

import pytz
import requests

# Django imports
from django.conf import settings

# Module imports
from plane.authentication.adapter.oauth import OauthAdapter
from plane.license.utils.instance_value import get_configuration_value
@@ -46,7 +49,15 @@ class GitHubOAuthProvider(OauthAdapter):
        client_id = GITHUB_CLIENT_ID
        client_secret = GITHUB_CLIENT_SECRET

        redirect_uri = f"""{"https" if request.is_secure() else "http"}://{request.get_host()}/auth/github/callback/"""
        scheme = (
            "https"
            if settings.IS_HEROKU
            else "https" if request.is_secure() else "http"
        )

        redirect_uri = (
            f"""{scheme}://{request.get_host()}/auth/github/callback/"""
        )
        url_params = {
            "client_id": client_id,
            "redirect_uri": redirect_uri,
@@ -5,6 +5,9 @@ from urllib.parse import urlencode

import pytz

# Django imports
from django.conf import settings

# Module imports
from plane.authentication.adapter.oauth import OauthAdapter
from plane.license.utils.instance_value import get_configuration_value
@@ -43,7 +46,15 @@ class GoogleOAuthProvider(OauthAdapter):
        client_id = GOOGLE_CLIENT_ID
        client_secret = GOOGLE_CLIENT_SECRET

        redirect_uri = f"""{"https" if request.is_secure() else "http"}://{request.get_host()}/auth/google/callback/"""
        scheme = (
            "https"
            if settings.IS_HEROKU
            else "https" if request.is_secure() else "http"
        )

        redirect_uri = (
            f"""{scheme}://{request.get_host()}/auth/google/callback/"""
        )
        url_params = {
            "client_id": client_id,
            "scope": self.scope,
158
apiserver/plane/authentication/provider/oauth/oidc.py
Normal file
@@ -0,0 +1,158 @@
# Python imports
import os
from datetime import datetime
from urllib.parse import urlencode
import pytz

# Module imports
from plane.authentication.adapter.oauth import OauthAdapter
from plane.license.utils.instance_value import get_configuration_value
from plane.authentication.adapter.error import (
    AuthenticationException,
    AUTHENTICATION_ERROR_CODES,
)
from plane.db.models import Account


class OIDCOAuthProvider(OauthAdapter):

    provider = "oidc"
    scope = "openid email profile"

    def __init__(self, request, code=None, state=None):
        (
            OIDC_CLIENT_ID,
            OIDC_CLIENT_SECRET,
            OIDC_TOKEN_URL,
            OIDC_USERINFO_URL,
            OIDC_AUTHORIZE_URL,
        ) = get_configuration_value(
            [
                {
                    "key": "OIDC_CLIENT_ID",
                    "default": os.environ.get("OIDC_CLIENT_ID"),
                },
                {
                    "key": "OIDC_CLIENT_SECRET",
                    "default": os.environ.get("OIDC_CLIENT_SECRET"),
                },
                {
                    "key": "OIDC_TOKEN_URL",
                    "default": os.environ.get("OIDC_TOKEN_URL"),
                },
                {
                    "key": "OIDC_USERINFO_URL",
                    "default": os.environ.get("OIDC_USERINFO_URL"),
                },
                {
                    "key": "OIDC_AUTHORIZE_URL",
                    "default": os.environ.get("OIDC_AUTHORIZE_URL"),
                },
            ]
        )

        if not (
            OIDC_CLIENT_ID
            and OIDC_CLIENT_SECRET
            and OIDC_TOKEN_URL
            and OIDC_USERINFO_URL
            and OIDC_AUTHORIZE_URL
        ):
            raise AuthenticationException(
                error_code=AUTHENTICATION_ERROR_CODES["OIDC_NOT_CONFIGURED"],
                error_message="OIDC_NOT_CONFIGURED",
            )

        redirect_uri = (
            f"{request.scheme}://{request.get_host()}/auth/oidc/callback/"
        )
        url_params = {
            "client_id": OIDC_CLIENT_ID,
            "response_type": "code",
            "redirect_uri": redirect_uri,
            "state": state,
            "scope": self.scope,
        }
        auth_url = f"{OIDC_AUTHORIZE_URL}?{urlencode(url_params)}"
        super().__init__(
            request,
            self.provider,
            OIDC_CLIENT_ID,
            self.scope,
            redirect_uri,
            auth_url,
            OIDC_TOKEN_URL,
            OIDC_USERINFO_URL,
            OIDC_CLIENT_SECRET,
            code,
        )

    def set_token_data(self):
        data = {
            "code": self.code,
            "client_id": self.client_id,
            "client_secret": self.client_secret,
            "redirect_uri": self.redirect_uri,
            "grant_type": "authorization_code",
        }
        token_response = self.get_user_token(
            data=data,
            headers={"Content-Type": "application/x-www-form-urlencoded"},
        )
        super().set_token_data(
            {
                "access_token": token_response.get("access_token"),
                "refresh_token": token_response.get("refresh_token", None),
                "access_token_expired_at": (
                    datetime.fromtimestamp(
                        token_response.get("expires_in"),
                        tz=pytz.utc,
                    )
                    if token_response.get("expires_in")
                    else None
                ),
                "refresh_token_expired_at": (
                    datetime.fromtimestamp(
                        token_response.get("refresh_token_expired_at"),
                        tz=pytz.utc,
                    )
                    if token_response.get("refresh_token_expired_at")
                    else None
                ),
                "id_token": token_response.get("id_token", ""),
            }
        )

    def set_user_data(self):
        user_info_response = self.get_user_response()
        user_data = {
            "email": user_info_response.get("email"),
            "user": {
                "avatar": user_info_response.get("picture"),
                "first_name": user_info_response.get("given_name"),
                "last_name": user_info_response.get("family_name"),
                "provider_id": user_info_response.get("sub"),
                "is_password_autoset": True,
            },
        }
        super().set_user_data(user_data)

    def logout(self, logout_url=None):
        (OIDC_LOGOUT_URL,) = get_configuration_value(
            [
                {
                    "key": "OIDC_LOGOUT_URL",
                    "default": os.environ.get("OIDC_LOGOUT_URL"),
                },
            ]
        )

        account = Account.objects.filter(
            user=self.request.user, provider=self.provider
        ).first()

        id_token = account.id_token if account and account.id_token else None
        if OIDC_LOGOUT_URL and id_token and logout_url:
            return f"{OIDC_LOGOUT_URL}?id_token_hint={id_token}&post_logout_redirect_uri={logout_url}"
        else:
            return False
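
# Editor's sketch (not part of this diff): the authorization-code exchange
# that set_token_data() performs, written as a bare requests call. The URL
# and credentials are illustrative placeholders.
import requests

token_response = requests.post(
    "https://idp.example.com/oauth/token",  # hypothetical OIDC_TOKEN_URL
    data={
        "code": "<authorization-code-from-callback>",
        "client_id": "<client-id>",
        "client_secret": "<client-secret>",
        "redirect_uri": "https://plane.example.com/auth/oidc/callback/",
        "grant_type": "authorization_code",
    },
    headers={"Content-Type": "application/x-www-form-urlencoded"},
).json()
# token_response then carries access_token, expires_in, id_token, ...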

@@ -22,6 +22,15 @@ from .views import (
    SignUpAuthEndpoint,
    ForgotPasswordSpaceEndpoint,
    ResetPasswordSpaceEndpoint,
    # OIDC
    OIDCAuthInitiateEndpoint,
    OIDCallbackEndpoint,
    OIDCLogoutEndpoint,
    # SAML
    SAMLAuthInitiateEndpoint,
    SAMLCallbackEndpoint,
    SAMLMetadataEndpoint,
    SAMLLogoutEndpoint,
    # Space
    EmailCheckSpaceEndpoint,
    GitLabCallbackSpaceEndpoint,
@@ -218,4 +227,41 @@ urlpatterns = [
        SetUserPasswordEndpoint.as_view(),
        name="set-password",
    ),
    # OIDC
    path(
        "oidc/",
        OIDCAuthInitiateEndpoint.as_view(),
        name="oidc",
    ),
    path(
        "oidc/callback/",
        OIDCallbackEndpoint.as_view(),
        name="oidc",
    ),
    path(
        "oidc/logout/",
        OIDCLogoutEndpoint.as_view(),
        name="oidc",
    ),
    # SAML
    path(
        "saml/",
        SAMLAuthInitiateEndpoint.as_view(),
        name="saml",
    ),
    path(
        "saml/callback/",
        SAMLCallbackEndpoint.as_view(),
        name="saml",
    ),
    path(
        "saml/metadata/",
        SAMLMetadataEndpoint.as_view(),
        name="saml",
    ),
    path(
        "saml/logout/",
        SAMLLogoutEndpoint.as_view(),
        name="saml",
    ),
]
@@ -1,4 +1,9 @@
# Python imports
import uuid

# Module imports
from .workspace_project_join import process_workspace_project_invitations
from plane.bgtasks.event_tracking_task import track_event


def post_user_auth_workflow(
@@ -6,4 +11,28 @@ def post_user_auth_workflow(
    is_signup,
    request,
):
    # Process workspace project invitations
    process_workspace_project_invitations(user=user)
    # track events

    event_mapper = {
        "email": "Email",
        "google": "GOOGLE",
        "magic-code": "Magic link",
        "github": "GITHUB",
    }

    track_event.delay(
        email=user.email,
        event_name="Sign up" if is_signup else "Sign in",
        properties={
            "event_id": uuid.uuid4().hex,
            "user": {"email": user.email, "id": str(user.id)},
            "device_ctx": {
                "ip": request.META.get("REMOTE_ADDR", None),
                "user_agent": request.META.get("HTTP_USER_AGENT", None),
            },
            "medium": event_mapper.get(user.last_login_medium, "Email"),
            "first_time": is_signup,
        },
    )
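
# Editor's note (not part of this diff): the medium lookup above falls back
# to "Email" for any login medium missing from the mapper:
event_mapper = {
    "email": "Email",
    "google": "GOOGLE",
    "magic-code": "Magic link",
    "github": "GITHUB",
}
assert event_mapper.get("github", "Email") == "GITHUB"
assert event_mapper.get("oidc", "Email") == "Email"  # unmapped medium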

@@ -5,6 +5,7 @@ from plane.db.models import (
    WorkspaceMemberInvite,
)
from plane.utils.cache import invalidate_cache_directly
from plane.payment.bgtasks.member_sync_task import member_sync_task


def process_workspace_project_invitations(user):
@@ -37,6 +38,12 @@ def process_workspace_project_invitations(user):
        for workspace_member_invite in workspace_member_invites
    ]

    # Sync workspace members
    [
        member_sync_task.delay(workspace_member_invite.workspace.slug)
        for workspace_member_invite in workspace_member_invites
    ]

    # Check if user has any project invites
    project_member_invites = ProjectMemberInvite.objects.filter(
        email=user.email, accepted=True
@@ -78,6 +85,12 @@ def process_workspace_project_invitations(user):
        ignore_conflicts=True,
    )

    # Sync workspace members
    [
        member_sync_task.delay(project_member_invite.workspace.slug)
        for project_member_invite in project_member_invites
    ]

    # Delete all the invites
    workspace_member_invites.delete()
    project_member_invites.delete()
@@ -28,6 +28,19 @@ from .app.magic import (
    MagicSignUpEndpoint,
)

from .app.oidc import (
    OIDCAuthInitiateEndpoint,
    OIDCallbackEndpoint,
    OIDCLogoutEndpoint,
)

from .app.saml import (
    SAMLAuthInitiateEndpoint,
    SAMLCallbackEndpoint,
    SAMLMetadataEndpoint,
    SAMLLogoutEndpoint,
)

from .app.signout import SignOutAuthEndpoint

104
apiserver/plane/authentication/views/app/oidc.py
Normal file
@@ -0,0 +1,104 @@
# Python imports
import uuid
from urllib.parse import urlencode, urljoin

# Django imports
from django.http import HttpResponseRedirect
from django.views import View
from django.contrib.auth import logout

# Module imports
from plane.authentication.provider.oauth.oidc import OIDCOAuthProvider
from plane.authentication.utils.workspace_project_join import (
    process_workspace_project_invitations,
)
from plane.authentication.utils.redirection_path import get_redirection_path
from plane.authentication.utils.login import user_login
from plane.license.models import Instance
from plane.authentication.adapter.error import (
    AuthenticationException,
    AUTHENTICATION_ERROR_CODES,
)
from plane.authentication.utils.host import base_host


class OIDCAuthInitiateEndpoint(View):

    def get(self, request):
        request.session["host"] = base_host(request=request, is_app=True)
        next_path = request.GET.get("next_path")
        if next_path:
            request.session["next_path"] = str(next_path)
        try:
            # Check instance configuration
            instance = Instance.objects.first()
            if instance is None or not instance.is_setup_done:
                raise AuthenticationException(
                    error_code=AUTHENTICATION_ERROR_CODES[
                        "INSTANCE_NOT_CONFIGURED"
                    ],
                    error_message="INSTANCE_NOT_CONFIGURED",
                )

            state = uuid.uuid4().hex
            provider = OIDCOAuthProvider(request=request, state=state)
            request.session["state"] = state
            auth_url = provider.get_auth_url()
            return HttpResponseRedirect(auth_url)
        except AuthenticationException as e:
            params = e.get_error_dict()
            if next_path:
                params["next_path"] = str(next_path)
            url = urljoin(
                base_host(request=request, is_app=True),
                "?" + urlencode(params),
            )
            return HttpResponseRedirect(url)


class OIDCallbackEndpoint(View):

    def get(self, request):
        code = request.GET.get("code")
        state = request.GET.get("state")
        host = request.session.get("host")
        try:
            if state != request.session.get("state", ""):
                raise AuthenticationException(
                    error_code="OIDC_PROVIDER_ERROR",
                    error_message="OIDC_PROVIDER_ERROR",
                )

            if not code:
                raise AuthenticationException(
                    error_code="OIDC_PROVIDER_ERROR",
                    error_message="OIDC_PROVIDER_ERROR",
                )

            provider = OIDCOAuthProvider(
                request=request,
                code=code,
            )
            user = provider.authenticate()
            # Login the user and record their device info
            user_login(request=request, user=user)
            # Process workspace and project invitations
            process_workspace_project_invitations(user=user)
            # Get the redirection path
            path = get_redirection_path(user=user)
            # redirect to referer path
            url = urljoin(host, path)
            return HttpResponseRedirect(url)
        except AuthenticationException as e:
            url = urljoin(
                host,
                "?" + urlencode(e.get_error_dict()),
            )
            return HttpResponseRedirect(url)


class OIDCLogoutEndpoint(View):

    def get(self, request):
        logout(request=request)
        return HttpResponseRedirect(base_host(request=request, is_app=True))
122
apiserver/plane/authentication/views/app/saml.py
Normal file
@@ -0,0 +1,122 @@
# Python imports
from urllib.parse import urlencode, urljoin

# Django imports
from django.http import HttpResponseRedirect, HttpResponse
from django.views import View
from django.views.decorators.csrf import csrf_exempt
from django.utils.decorators import method_decorator

from django.contrib.auth import logout

# Module imports
from plane.authentication.adapter.saml import SAMLAdapter
from plane.authentication.utils.login import user_login
from plane.authentication.utils.workspace_project_join import (
    process_workspace_project_invitations,
)
from plane.authentication.utils.redirection_path import get_redirection_path
from plane.license.models import Instance
from plane.authentication.adapter.error import (
    AuthenticationException,
    AUTHENTICATION_ERROR_CODES,
)
from plane.authentication.utils.host import base_host


class SAMLAuthInitiateEndpoint(View):
    def get(self, request):
        request.session["host"] = base_host(request=request, is_app=True)
        next_path = request.GET.get("next_path")
        if next_path:
            request.session["next_path"] = str(next_path)

        try:
            # Check instance configuration
            instance = Instance.objects.first()
            if instance is None or not instance.is_setup_done:
                raise AuthenticationException(
                    error_code=AUTHENTICATION_ERROR_CODES[
                        "INSTANCE_NOT_CONFIGURED"
                    ],
                    error_message="INSTANCE_NOT_CONFIGURED",
                )
            # Provider
            provider = SAMLAdapter(
                request=request,
            )
            # Get the auth url
            return_url = provider.get_auth_url()
            return HttpResponseRedirect(return_url)
        except AuthenticationException as e:
            params = e.get_error_dict()
            if next_path:
                params["next_path"] = str(next_path)
            url = urljoin(
                base_host(request=request, is_app=True),
                "?" + urlencode(params),
            )
            return HttpResponseRedirect(url)


@method_decorator(csrf_exempt, name="dispatch")
class SAMLCallbackEndpoint(View):
    def post(self, request):
        host = request.session.get("host", "/")
        try:
            provider = SAMLAdapter(request=request)
            user = provider.authenticate()
            # Login the user and record their device info
            user_login(request=request, user=user)
            # Process workspace and project invitations
            process_workspace_project_invitations(user=user)
            # Get the redirection path
            path = get_redirection_path(user=user)
            # redirect to referer path
            url = urljoin(host, path)
            return HttpResponseRedirect(url)
        except AuthenticationException as e:
            url = urljoin(host, "?" + urlencode(e.get_error_dict()))
            return HttpResponseRedirect(url)


@method_decorator(csrf_exempt, name="dispatch")
class SAMLLogoutEndpoint(View):

    def get(self, request, *args, **kwargs):
        logout(request=request)
        return HttpResponseRedirect(base_host(request=request, is_app=True))


@method_decorator(csrf_exempt, name="dispatch")
class SAMLMetadataEndpoint(View):

    def get(self, request):
        xml_template = f"""<EntityDescriptor xmlns="urn:oasis:names:tc:SAML:2.0:metadata"
    entityID="{request.scheme}://{request.get_host()}/auth/saml/metadata/">
    <SPSSODescriptor protocolSupportEnumeration="urn:oasis:names:tc:SAML:2.0:protocol">
        <AssertionConsumerService Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
            Location="{request.scheme}://{request.get_host()}/auth/saml/callback/"
            index="1"/>
        <SingleLogoutService Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
            Location="{request.scheme}://{request.get_host()}/auth/saml/logout/"/>
        <NameIDFormat>urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress</NameIDFormat>
        <AttributeConsumingService index="1">
            <ServiceName xml:lang="en">Plane</ServiceName>
            <RequestedAttribute Name="user.firstName"
                FriendlyName="first_name"
                NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic"
                isRequired="false"/>
            <RequestedAttribute Name="user.lastName"
                FriendlyName="last_name"
                NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic"
                isRequired="false"/>
            <RequestedAttribute Name="user.email"
                FriendlyName="email"
                NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic"
                isRequired="true"/>
        </AttributeConsumingService>
    </SPSSODescriptor>
</EntityDescriptor>
"""
        return HttpResponse(xml_template, content_type="application/xml")
@@ -7,6 +7,8 @@ from django.utils import timezone

# Module imports
from plane.authentication.utils.host import user_ip, base_host
from plane.db.models import User
from plane.authentication.provider.oauth.oidc import OIDCOAuthProvider
from plane.authentication.adapter.saml import SAMLAdapter


class SignOutAuthEndpoint(View):
@@ -18,7 +20,28 @@ class SignOutAuthEndpoint(View):
        user.last_logout_ip = user_ip(request=request)
        user.last_logout_time = timezone.now()
        user.save()
        # Log the user out

        # Check if the user's last login medium is oidc
        if request.user.last_login_medium == "oidc":
            provider = OIDCOAuthProvider(
                request=request,
            )
            logout_url = provider.logout(
                logout_url=f"{base_host(request=request, is_app=True)}/auth/oidc/logout/"
            )
            if logout_url:
                return HttpResponseRedirect(logout_url)

        # Check if the user's last login medium is saml
        if request.user.last_login_medium == "saml":
            provider = SAMLAdapter(
                request=request,
            )
            logout_url = provider.logout()
            if logout_url:
                return HttpResponseRedirect(logout_url)

        # Logout user
        logout(request)
        return HttpResponseRedirect(
            base_host(request=request, is_app=True)
@@ -2,14 +2,69 @@ from django.utils import timezone
from datetime import timedelta
from plane.db.models import APIActivityLog
from celery import shared_task
from django.conf import settings
from pymongo import MongoClient
from pymongo.errors import BulkWriteError
from plane.utils.exception_logger import log_exception

BATCH_SIZE = 3000


@shared_task
def delete_api_logs():
    # Get the logs older than 30 days to delete
    logs_to_delete = APIActivityLog.objects.filter(
        created_at__lte=timezone.now() - timedelta(days=30)
    )

    # Delete the logs
    logs_to_delete._raw_delete(logs_to_delete.db)
    if settings.MONGO_DB_URL:
        # Get the logs older than 30 days to delete
        logs_to_delete = APIActivityLog.objects.filter(
            created_at__lte=timezone.now() - timedelta(days=30)
        )

        # Create a MongoDB client
        client = MongoClient(settings.MONGO_DB_URL)
        db = client["plane"]
        collection = db["api_activity_logs"]

        # Function to insert documents in batches
        def bulk_insert(docs):
            try:
                collection.insert_many(docs)
            except BulkWriteError as bwe:
                log_exception(bwe)

        # Prepare the logs for bulk insert
        def log_generator():
            batch = []
            for log in logs_to_delete.iterator():
                batch.append(
                    {
                        "token_identifier": log.token_identifier,
                        "path": log.path,
                        "method": log.method,
                        "query_params": log.query_params,
                        "headers": log.headers,
                        "body": log.body,
                        "response_body": log.response_body,
                        "response_code": log.response_code,
                        "ip_address": log.ip_address,
                        "user_agent": log.user_agent,
                        "created_at": log.created_at,
                        "updated_at": log.updated_at,
                        "created_by": str(log.created_by_id) if log.created_by_id else None,
                        "updated_by": str(log.updated_by_id) if log.updated_by_id else None,
                    }
                )
                # If batch size is reached, yield the batch
                if len(batch) == BATCH_SIZE:
                    yield batch
                    batch = []

            # Yield the remaining logs
            if batch:
                yield batch

        # Upload the logs to MongoDB in batches
        for batch in log_generator():
            bulk_insert(batch)

        # Delete the logs
        logs_to_delete._raw_delete(logs_to_delete.db)
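
# Editor's sketch (not part of this diff): the generator-plus-bulk-insert
# pattern above, reduced to its core. It works for any iterable source and
# keeps at most batch_size items in memory at a time.
def batched(iterable, batch_size=3000):
    batch = []
    for item in iterable:
        batch.append(item)
        if len(batch) == batch_size:
            yield batch
            batch = []
    if batch:
        yield batch

# for chunk in batched(queryset.iterator()):
#     collection.insert_many(chunk)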
598
apiserver/plane/bgtasks/create_faker.py
Normal file
@@ -0,0 +1,598 @@
# Python imports
import random
from datetime import datetime

# Django imports
from django.db.models import Max

# Third party imports
from celery import shared_task
from faker import Faker

# Module imports
from plane.db.models import (
    Workspace,
    WorkspaceMember,
    User,
    Project,
    ProjectMember,
    State,
    Label,
    Cycle,
    Module,
    Issue,
    IssueSequence,
    IssueAssignee,
    IssueLabel,
    IssueActivity,
    CycleIssue,
    ModuleIssue,
)


def create_workspace_members(workspace, members):
    members = User.objects.filter(email__in=members)

    _ = WorkspaceMember.objects.bulk_create(
        [
            WorkspaceMember(
                workspace=workspace,
                member=member,
                role=20,
            )
            for member in members
        ],
        ignore_conflicts=True,
    )
    return


def create_project(workspace, user_id):
    fake = Faker()
    name = fake.name()
    project = Project.objects.create(
        workspace=workspace,
        name=name,
        identifier=name[
            : random.randint(2, 12 if len(name) - 1 >= 12 else len(name) - 1)
        ].upper(),
        created_by_id=user_id,
    )

    # Add current member as project member
    _ = ProjectMember.objects.create(
        project=project,
        member_id=user_id,
        role=20,
    )

    return project


def create_project_members(workspace, project, members):
    members = User.objects.filter(email__in=members)

    _ = ProjectMember.objects.bulk_create(
        [
            ProjectMember(
                project=project,
                workspace=workspace,
                member=member,
                role=20,
                sort_order=random.randint(0, 65535),
            )
            for member in members
        ],
        ignore_conflicts=True,
    )
    return


def create_states(workspace, project, user_id):
    states = [
        {
            "name": "Backlog",
            "color": "#A3A3A3",
            "sequence": 15000,
            "group": "backlog",
            "default": True,
        },
        {
            "name": "Todo",
            "color": "#3A3A3A",
            "sequence": 25000,
            "group": "unstarted",
        },
        {
            "name": "In Progress",
            "color": "#F59E0B",
            "sequence": 35000,
            "group": "started",
        },
        {
            "name": "Done",
            "color": "#16A34A",
            "sequence": 45000,
            "group": "completed",
        },
        {
            "name": "Cancelled",
            "color": "#EF4444",
            "sequence": 55000,
            "group": "cancelled",
        },
    ]

    states = State.objects.bulk_create(
        [
            State(
                name=state["name"],
                color=state["color"],
                project=project,
                sequence=state["sequence"],
                workspace=workspace,
                group=state["group"],
                default=state.get("default", False),
                created_by_id=user_id,
            )
            for state in states
        ]
    )

    return states


def create_labels(workspace, project, user_id):
    fake = Faker()
    Faker.seed(0)

    return Label.objects.bulk_create(
        [
            Label(
                name=fake.color_name(),
                color=fake.hex_color(),
                project=project,
                workspace=workspace,
                created_by_id=user_id,
                sort_order=random.randint(0, 65535),
            )
            for _ in range(0, 50)
        ],
        ignore_conflicts=True,
    )


def create_cycles(workspace, project, user_id, cycle_count):
    fake = Faker()
    Faker.seed(0)

    cycles = []
    used_date_ranges = set()  # Track used date ranges

    while len(cycles) <= cycle_count:
        # Generate a start date, allowing for None
        start_date_option = [None, fake.date_this_year()]
        start_date = start_date_option[random.randint(0, 1)]

        # Initialize end_date based on start_date
        end_date = (
            None
            if start_date is None
            else fake.date_between_dates(
                date_start=start_date,
                date_end=datetime.now().date().replace(month=12, day=31),
            )
        )

        # Ensure end_date is strictly after start_date if start_date is not None
        while start_date is not None and (
            end_date <= start_date
            or (start_date, end_date) in used_date_ranges
        ):
            end_date = fake.date_this_year()

        # Add the unique date range to the set
        (
            used_date_ranges.add((start_date, end_date))
            if (end_date is not None and start_date is not None)
            else None
        )

        # Append the cycle with unique date range
        cycles.append(
            Cycle(
                name=fake.name(),
                owned_by_id=user_id,
                sort_order=random.randint(0, 65535),
                start_date=start_date,
                end_date=end_date,
                project=project,
                workspace=workspace,
            )
        )

    return Cycle.objects.bulk_create(cycles, ignore_conflicts=True)
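
# Editor's sketch (not part of this diff): the uniqueness loop above, in the
# more conventional retry-until-unused shape it implements.
import random
from datetime import date, timedelta

def unique_range(used, year=2024):
    while True:
        start = date(year, 1, 1) + timedelta(days=random.randrange(300))
        end = start + timedelta(days=random.randrange(1, 60))
        if (start, end) not in used:
            used.add((start, end))
            return start, end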


def create_modules(workspace, project, user_id, module_count):
    fake = Faker()
    Faker.seed(0)

    modules = []
    for _ in range(0, module_count):
        start_date = [None, fake.date_this_year()][random.randint(0, 1)]
        end_date = (
            None
            if start_date is None
            else fake.date_between_dates(
                date_start=start_date,
                date_end=datetime.now().date().replace(month=12, day=31),
            )
        )

        modules.append(
            Module(
                name=fake.name(),
                sort_order=random.randint(0, 65535),
                start_date=start_date,
                target_date=end_date,
                project=project,
                workspace=workspace,
            )
        )

    return Module.objects.bulk_create(modules, ignore_conflicts=True)


def create_issues(workspace, project, user_id, issue_count):
    fake = Faker()
    Faker.seed(0)

    states = State.objects.values_list("id", flat=True)
    creators = ProjectMember.objects.values_list("member_id", flat=True)

    issues = []

    # Get the maximum sequence_id
    last_id = IssueSequence.objects.filter(
        project=project,
    ).aggregate(
        largest=Max("sequence")
    )["largest"]

    last_id = 1 if last_id is None else last_id + 1

    # Get the maximum sort order
    largest_sort_order = Issue.objects.filter(
        project=project,
        state_id=states[random.randint(0, len(states) - 1)],
    ).aggregate(largest=Max("sort_order"))["largest"]

    largest_sort_order = (
        65535 if largest_sort_order is None else largest_sort_order + 10000
    )

    for _ in range(0, issue_count):
        start_date = [None, fake.date_this_year()][random.randint(0, 1)]
        end_date = (
            None
            if start_date is None
            else fake.date_between_dates(
                date_start=start_date,
                date_end=datetime.now().date().replace(month=12, day=31),
            )
        )

        sentence = fake.sentence()
        issues.append(
            Issue(
                state_id=states[random.randint(0, len(states) - 1)],
                project=project,
                workspace=workspace,
                name=sentence[:254],
                description_html=f"<p>{sentence}</p>",
                description_stripped=sentence,
                sequence_id=last_id,
                sort_order=largest_sort_order,
                start_date=start_date,
                target_date=end_date,
                priority=["urgent", "high", "medium", "low", "none"][
                    random.randint(0, 4)
                ],
                created_by_id=creators[random.randint(0, len(creators) - 1)],
            )
        )

        largest_sort_order = largest_sort_order + random.randint(0, 1000)
        last_id = last_id + 1

    issues = Issue.objects.bulk_create(
        issues, ignore_conflicts=True, batch_size=1000
    )
    # Sequences
    _ = IssueSequence.objects.bulk_create(
        [
            IssueSequence(
                issue=issue,
                sequence=issue.sequence_id,
                project=project,
                workspace=workspace,
            )
            for issue in issues
        ],
        batch_size=100,
    )

    # Track the issue activities
    IssueActivity.objects.bulk_create(
        [
            IssueActivity(
                issue=issue,
                actor_id=user_id,
                project=project,
                workspace=workspace,
                comment="created the issue",
                verb="created",
                created_by_id=user_id,
            )
            for issue in issues
        ],
        batch_size=100,
    )
    return


def create_issue_parent(workspace, project, user_id, issue_count):

    parent_count = issue_count / 4

    parent_issues = Issue.objects.filter(project=project).values_list(
        "id", flat=True
    )[: int(parent_count)]
    sub_issues = Issue.objects.filter(project=project).exclude(
        pk__in=parent_issues
    )[: int(issue_count / 2)]

    bulk_sub_issues = []
    for sub_issue in sub_issues:
        sub_issue.parent_id = parent_issues[
            random.randint(0, int(parent_count - 1))
        ]
        # Collect the mutated issue so bulk_update actually persists it
        bulk_sub_issues.append(sub_issue)

    Issue.objects.bulk_update(bulk_sub_issues, ["parent"], batch_size=1000)
|
||||
|
||||
|
||||
def create_issue_assignees(workspace, project, user_id, issue_count):
|
||||
# assignees
|
||||
assignees = ProjectMember.objects.filter(project=project).values_list(
|
||||
"member_id", flat=True
|
||||
)
|
||||
issues = random.sample(
|
||||
list(
|
||||
Issue.objects.filter(project=project).values_list("id", flat=True)
|
||||
),
|
||||
int(issue_count / 2),
|
||||
)
|
||||
|
||||
# Bulk issue
|
||||
bulk_issue_assignees = []
|
||||
for issue in issues:
|
||||
for assignee in random.sample(
|
||||
list(assignees), random.randint(0, len(assignees) - 1)
|
||||
):
|
||||
bulk_issue_assignees.append(
|
||||
IssueAssignee(
|
||||
issue_id=issue,
|
||||
assignee_id=assignee,
|
||||
project=project,
|
||||
workspace=workspace,
|
||||
)
|
||||
)
|
||||
|
||||
# Issue assignees
|
||||
IssueAssignee.objects.bulk_create(
|
||||
bulk_issue_assignees, batch_size=1000, ignore_conflicts=True
|
||||
)
|
||||
|
||||
|
||||
def create_issue_labels(workspace, project, user_id, issue_count):
    # Labels of the project
    labels = Label.objects.filter(project=project).values_list("id", flat=True)
    issues = random.sample(
        list(
            Issue.objects.filter(project=project).values_list("id", flat=True)
        ),
        int(issue_count / 2),
    )

    # Build the issue-label pairs in bulk
    bulk_issue_labels = []
    for issue in issues:
        for label in random.sample(
            list(labels), random.randint(0, len(labels) - 1)
        ):
            bulk_issue_labels.append(
                IssueLabel(
                    issue_id=issue,
                    label_id=label,
                    project=project,
                    workspace=workspace,
                )
            )

    # Issue labels
    IssueLabel.objects.bulk_create(
        bulk_issue_labels, batch_size=1000, ignore_conflicts=True
    )

def create_cycle_issues(workspace, project, user_id, issue_count):
    # Cycles of the project
    cycles = Cycle.objects.filter(project=project).values_list("id", flat=True)
    issues = random.sample(
        list(
            Issue.objects.filter(project=project).values_list("id", flat=True)
        ),
        int(issue_count / 2),
    )

    # Assign each sampled issue to a random cycle
    bulk_cycle_issues = []
    for issue in issues:
        cycle = cycles[random.randint(0, len(cycles) - 1)]
        bulk_cycle_issues.append(
            CycleIssue(
                cycle_id=cycle,
                issue_id=issue,
                project=project,
                workspace=workspace,
            )
        )

    # Cycle issues
    CycleIssue.objects.bulk_create(
        bulk_cycle_issues, batch_size=1000, ignore_conflicts=True
    )

def create_module_issues(workspace, project, user_id, issue_count):
    # Modules of the project
    modules = Module.objects.filter(project=project).values_list(
        "id", flat=True
    )
    issues = random.sample(
        list(
            Issue.objects.filter(project=project).values_list("id", flat=True)
        ),
        int(issue_count / 2),
    )

    # Assign each sampled issue to a random module
    bulk_module_issues = []
    for issue in issues:
        module = modules[random.randint(0, len(modules) - 1)]
        bulk_module_issues.append(
            ModuleIssue(
                module_id=module,
                issue_id=issue,
                project=project,
                workspace=workspace,
            )
        )
    # Module issues
    ModuleIssue.objects.bulk_create(
        bulk_module_issues, batch_size=1000, ignore_conflicts=True
    )

@shared_task
def create_fake_data(
    slug, email, members, issue_count, cycle_count, module_count
):
    workspace = Workspace.objects.get(slug=slug)

    user = User.objects.get(email=email)
    user_id = user.id

    # create workspace members
    print("Creating workspace members")
    create_workspace_members(workspace=workspace, members=members)
    print("Done creating workspace members")

    # Create a project
    print("Creating project")
    project = create_project(workspace=workspace, user_id=user_id)
    print("Done creating project")

    # create project members
    print("Creating project members")
    create_project_members(
        workspace=workspace, project=project, members=members
    )
    print("Done creating project members")

    # Create states
    print("Creating states")
    _ = create_states(workspace=workspace, project=project, user_id=user_id)
    print("Done creating states")

    # Create labels
    print("Creating labels")
    _ = create_labels(workspace=workspace, project=project, user_id=user_id)
    print("Done creating labels")

    # create cycles
    print("Creating cycles")
    _ = create_cycles(
        workspace=workspace,
        project=project,
        user_id=user_id,
        cycle_count=cycle_count,
    )
    print("Done creating cycles")

    # create modules
    print("Creating modules")
    _ = create_modules(
        workspace=workspace,
        project=project,
        user_id=user_id,
        module_count=module_count,
    )
    print("Done creating modules")

    print("Creating issues")
    create_issues(
        workspace=workspace,
        project=project,
        user_id=user_id,
        issue_count=issue_count,
    )
    print("Done creating issues")

    print("Creating parent and sub issues")
    create_issue_parent(
        workspace=workspace,
        project=project,
        user_id=user_id,
        issue_count=issue_count,
    )
    print("Done creating parent and sub issues")

    print("Creating issue assignees")
    create_issue_assignees(
        workspace=workspace,
        project=project,
        user_id=user_id,
        issue_count=issue_count,
    )
    print("Done creating issue assignees")

    print("Creating issue labels")
    create_issue_labels(
        workspace=workspace,
        project=project,
        user_id=user_id,
        issue_count=issue_count,
    )
    print("Done creating issue labels")

    print("Creating cycle issues")
    create_cycle_issues(
        workspace=workspace,
        project=project,
        user_id=user_id,
        issue_count=issue_count,
    )
    print("Done creating cycle issues")

    print("Creating module issues")
    create_module_issues(
        workspace=workspace,
        project=project,
        user_id=user_id,
        issue_count=issue_count,
    )
    print("Done creating module issues")

    return
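For reference, the task above is what the faker management command (later in this diff) enqueues after creating the workspace; a minimal sketch of calling it directly from a Django shell, assuming a running Celery worker and an existing workspace and user (the literal values below are illustrative):

    create_fake_data.delay(
        slug="demo-workspace",      # slug of an existing workspace
        email="admin@example.com",  # email of an existing user
        members=[],
        issue_count=100,
        cycle_count=5,
        module_count=5,
    )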
@@ -1,5 +1,4 @@
import os
import uuid

# third party imports
from celery import shared_task
@@ -30,7 +29,7 @@ def posthogConfiguration():


@shared_task
def auth_events(user, email, user_agent, ip, event_name, medium, first_time):
def track_event(email, event_name, properties):
    try:
        POSTHOG_API_KEY, POSTHOG_HOST = posthogConfiguration()

@@ -39,43 +38,7 @@ def auth_events(user, email, user_agent, ip, event_name, medium, first_time):
            posthog.capture(
                email,
                event=event_name,
                properties={
                    "event_id": uuid.uuid4().hex,
                    "user": {"email": email, "id": str(user)},
                    "device_ctx": {
                        "ip": ip,
                        "user_agent": user_agent,
                    },
                    "medium": medium,
                    "first_time": first_time,
                },
            )
    except Exception as e:
        log_exception(e)
        return


@shared_task
def workspace_invite_event(
    user, email, user_agent, ip, event_name, accepted_from
):
    try:
        POSTHOG_API_KEY, POSTHOG_HOST = posthogConfiguration()

        if POSTHOG_API_KEY and POSTHOG_HOST:
            posthog = Posthog(POSTHOG_API_KEY, host=POSTHOG_HOST)
            posthog.capture(
                email,
                event=event_name,
                properties={
                    "event_id": uuid.uuid4().hex,
                    "user": {"email": email, "id": str(user)},
                    "device_ctx": {
                        "ip": ip,
                        "user_agent": user_agent,
                    },
                    "accepted_from": accepted_from,
                },
                properties=properties,
            )
    except Exception as e:
        log_exception(e)

216
apiserver/plane/bgtasks/importer_task.py
Normal file
@@ -0,0 +1,216 @@
# Python imports
import json
import requests
import uuid

# Django imports
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
from django.contrib.auth.hashers import make_password

# Third Party imports
from celery import shared_task
from sentry_sdk import capture_exception

# Module imports
from plane.app.serializers import ImporterSerializer
from plane.db.models import (
    Importer,
    WorkspaceMember,
    GithubRepositorySync,
    GithubRepository,
    ProjectMember,
    WorkspaceIntegration,
    Label,
    User,
    IssueUserProperty,
    UserNotificationPreference,
)

from plane.bgtasks.user_welcome_task import send_welcome_slack
from plane.payment.bgtasks.member_sync_task import member_sync_task


@shared_task
def service_importer(service, importer_id):
    try:
        importer = Importer.objects.get(pk=importer_id)
        importer.status = "processing"
        importer.save()

        users = importer.data.get("users", [])

        # Check if we need to import users as well
        if len(users):
            # For all invited users create the users
            new_users = User.objects.bulk_create(
                [
                    User(
                        email=user.get("email").strip().lower(),
                        username=uuid.uuid4().hex,
                        password=make_password(uuid.uuid4().hex),
                        is_password_autoset=True,
                    )
                    for user in users
                    if user.get("import", False) == "invite"
                ],
                batch_size=100,
                ignore_conflicts=True,
            )

            _ = UserNotificationPreference.objects.bulk_create(
                [UserNotificationPreference(user=user) for user in new_users],
                batch_size=100,
            )

            _ = [
                send_welcome_slack.delay(
                    str(user.id),
                    True,
                    f"{user.email} was imported to Plane from {service}",
                )
                for user in new_users
            ]

            workspace_users = User.objects.filter(
                email__in=[
                    user.get("email").strip().lower()
                    for user in users
                    if user.get("import", False) == "invite"
                    or user.get("import", False) == "map"
                ]
            )

            # Check if any of the users are already member of workspace
            _ = WorkspaceMember.objects.filter(
                member__in=[user for user in workspace_users],
                workspace_id=importer.workspace_id,
            ).update(is_active=True)

            # Add new users to Workspace and project automatically
            WorkspaceMember.objects.bulk_create(
                [
                    WorkspaceMember(
                        member=user,
                        workspace_id=importer.workspace_id,
                        created_by=importer.created_by,
                    )
                    for user in workspace_users
                ],
                batch_size=100,
                ignore_conflicts=True,
            )

            # Sync workspace members
            member_sync_task(importer.workspace.slug)

            ProjectMember.objects.bulk_create(
                [
                    ProjectMember(
                        project_id=importer.project_id,
                        workspace_id=importer.workspace_id,
                        member=user,
                        created_by=importer.created_by,
                    )
                    for user in workspace_users
                ],
                batch_size=100,
                ignore_conflicts=True,
            )

            IssueUserProperty.objects.bulk_create(
                [
                    IssueUserProperty(
                        project_id=importer.project_id,
                        workspace_id=importer.workspace_id,
                        user=user,
                        created_by=importer.created_by,
                    )
                    for user in workspace_users
                ],
                batch_size=100,
                ignore_conflicts=True,
            )

        # Check if sync config is on for github importers
        if service == "github" and importer.config.get("sync", False):
            name = importer.metadata.get("name", False)
            url = importer.metadata.get("url", False)
            config = importer.metadata.get("config", {})
            owner = importer.metadata.get("owner", False)
            repository_id = importer.metadata.get("repository_id", False)

            workspace_integration = WorkspaceIntegration.objects.get(
                workspace_id=importer.workspace_id,
                integration__provider="github",
            )

            # Delete the old repository object
            GithubRepositorySync.objects.filter(
                project_id=importer.project_id
            ).delete()
            GithubRepository.objects.filter(
                project_id=importer.project_id
            ).delete()

            # Create a Label for github
            label = Label.objects.filter(
                name="GitHub", project_id=importer.project_id
            ).first()

            if label is None:
                label = Label.objects.create(
                    name="GitHub",
                    project_id=importer.project_id,
                    description="Label to sync Plane issues with GitHub issues",
                    color="#003773",
                )
            # Create repository
            repo = GithubRepository.objects.create(
                name=name,
                url=url,
                config=config,
                repository_id=repository_id,
                owner=owner,
                project_id=importer.project_id,
            )

            # Create repo sync
            _ = GithubRepositorySync.objects.create(
                repository=repo,
                workspace_integration=workspace_integration,
                actor=workspace_integration.actor,
                credentials=importer.data.get("credentials", {}),
                project_id=importer.project_id,
                label=label,
            )

            # Add bot as a member in the project
            _ = ProjectMember.objects.get_or_create(
                member=workspace_integration.actor,
                role=20,
                project_id=importer.project_id,
            )

        if settings.PROXY_BASE_URL:
            headers = {"Content-Type": "application/json"}
            import_data_json = json.dumps(
                ImporterSerializer(importer).data,
                cls=DjangoJSONEncoder,
            )
            _ = requests.post(
                f"{settings.PROXY_BASE_URL}/hooks/workspaces/{str(importer.workspace_id)}/projects/{str(importer.project_id)}/importers/{str(service)}/",
                json=import_data_json,
                headers=headers,
            )

        return
    except Exception as e:
        importer = Importer.objects.get(pk=importer_id)
        importer.status = "failed"
        importer.save()
        # Print logs if in DEBUG mode
        if settings.DEBUG:
            print(e)
        capture_exception(e)
        return
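A minimal sketch of how this task is meant to be enqueued (the Importer row is assumed to already exist; "github" is one of the service names the task explicitly handles):

    service_importer.delay("github", str(importer.id))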
@@ -1,13 +1,11 @@
# Python imports
import json

import requests

# Third Party imports
from celery import shared_task

# Django imports
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
from django.utils import timezone

@@ -1757,3 +1755,4 @@ def issue_activity(
    except Exception as e:
        log_exception(e)
        return

@@ -42,8 +42,9 @@ def page_transaction(new_value, old_value, page_id):
    new_transactions = []
    deleted_transaction_ids = set()

    # TODO - Add "issue-embed-component", "img", "todo" components
    components = ["mention-component"]
    # TODO - Add "img", "todo" components
    components = ["mention-component", "issue-embed-component"]

    for component in components:
        old_mentions = extract_components(old_value, component)
        new_mentions = extract_components(new_value, component)
@@ -57,7 +58,11 @@ def page_transaction(new_value, old_value, page_id):
                transaction=mention["id"],
                page_id=page_id,
                entity_identifier=mention["entity_identifier"],
                entity_name=mention["entity_name"],
                entity_name=(
                    mention["entity_name"]
                    if mention["entity_name"]
                    else "issue"
                ),
                workspace_id=page.workspace_id,
                created_at=timezone.now(),
                updated_at=timezone.now(),
36
apiserver/plane/bgtasks/user_welcome_task.py
Normal file
@@ -0,0 +1,36 @@
# Django imports
from django.conf import settings

# Third party imports
from celery import shared_task
from sentry_sdk import capture_exception
from slack_sdk import WebClient
from slack_sdk.errors import SlackApiError

# Module imports
from plane.db.models import User


@shared_task
def send_welcome_slack(user_id, created, message):
    try:
        instance = User.objects.get(pk=user_id)

        if created and not instance.is_bot:
            # Send message on slack as well
            if settings.SLACK_BOT_TOKEN:
                client = WebClient(token=settings.SLACK_BOT_TOKEN)
                try:
                    _ = client.chat_postMessage(
                        channel="#trackers",
                        text=message,
                    )
                except SlackApiError as e:
                    print(f"Got an error: {e.response['error']}")
        return
    except Exception as e:
        # Print logs if in DEBUG mode
        if settings.DEBUG:
            print(e)
        capture_exception(e)
        return
@@ -3,8 +3,11 @@ import logging

# Third party imports
from celery import shared_task
from slack_sdk import WebClient
from slack_sdk.errors import SlackApiError

# Django imports
from django.conf import settings
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import render_to_string
from django.utils.html import strip_tags
@@ -15,6 +18,18 @@ from plane.license.utils.instance_value import get_email_configuration
from plane.utils.exception_logger import log_exception


def push_updated_to_slack(workspace, workspace_member_invite):
    # Send message on slack as well
    client = WebClient(token=settings.SLACK_BOT_TOKEN)
    try:
        _ = client.chat_postMessage(
            channel="#trackers",
            text=f"{workspace_member_invite.email} has been invited to {workspace.name} as a {workspace_member_invite.role}",
        )
    except SlackApiError as e:
        print(f"Got an error: {e.response['error']}")


@shared_task
def workspace_invitation(email, workspace_id, token, current_site, invitor):
    try:
@@ -80,6 +95,10 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor):
        msg.send()
        logging.getLogger("plane").info("Email sent successfully")

        # Send message on slack as well
        if settings.SLACK_BOT_TOKEN:
            push_updated_to_slack(workspace, workspace_member_invite)

        return
    except (Workspace.DoesNotExist, WorkspaceMemberInvite.DoesNotExist) as e:
        log_exception(e)

@@ -40,6 +40,14 @@ app.conf.beat_schedule = {
        "task": "plane.bgtasks.deletion_task.hard_delete",
        "schedule": crontab(hour=0, minute=0),
    },
    "check-every-12-hr-instance-version": {
        "task": "plane.license.bgtasks.version_check_task.version_check",
        "schedule": crontab(hour="*/12", minute=0),
    },
    "check-every-day-to-sync-workspace-members": {
        "task": "plane.payment.bgtasks.workspace_subscription_sync_task.schedule_workspace_billing_task",
        "schedule": crontab(hour=0, minute=0),
    },
}

# Load task modules from all registered Django app configs.

0
apiserver/plane/db/backfills/__init__.py
Normal file
63
apiserver/plane/db/backfills/backfill_0070_page_versions.py
Normal file
@@ -0,0 +1,63 @@
# Third party imports
from celery import shared_task

# Django imports
from django.utils import timezone

# Module imports
from plane.db.models import PageVersion, IssueType, Issue


@shared_task
def backfill_issue_type_task(projects):
    # Create the issue types for all projects
    IssueType.objects.bulk_create(
        [
            IssueType(
                name="Task",
                description="A task that needs to be completed.",
                project_id=project["id"],
                workspace_id=project["workspace_id"],
            )
            for project in projects
        ],
        batch_size=1000,
    )

    # Map each project to its newly created issue type
    issue_types = {
        str(issue_type["project_id"]): str(issue_type["id"])
        for issue_type in IssueType.objects.filter(
            project_id__in=[project["id"] for project in projects]
        ).values("id", "project_id")
    }
    # Update the issue type for all existing issues
    bulk_issues = []
    for issue in Issue.objects.filter(
        project_id__in=[project["id"] for project in projects]
    ):
        issue.type_id = issue_types[str(issue.project_id)]
        bulk_issues.append(issue)

    # Persist the updates in bulk
    Issue.objects.bulk_update(bulk_issues, ["type_id"], batch_size=1000)

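A sketch of dispatching the backfill above (assumes a Project model exposing "id" and "workspace_id", the two keys the task reads; depending on the configured Celery serializer, UUID values may need to be stringified first):

    backfill_issue_type_task.delay(
        list(Project.objects.values("id", "workspace_id"))
    )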
@shared_task
def backfill_page_versions_task(pages):
    # Create the page versions for all pages
    PageVersion.objects.bulk_create(
        [
            PageVersion(
                page_id=page["id"],
                workspace_id=page["workspace_id"],
                last_saved_at=timezone.now(),
                owned_by_id=page["owned_by_id"],
                description_binary=page["description_binary"],
                description_html=page["description_html"],
                description_stripped=page["description_stripped"],
            )
            for page in pages
        ],
        batch_size=1000,
    )
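The page-version backfill could be fed the same way (a sketch; assumes a Page model carrying the fields the task reads, and that the payload, including the binary description, survives the configured Celery serializer):

    backfill_page_versions_task.delay(
        list(
            Page.objects.values(
                "id",
                "workspace_id",
                "owned_by_id",
                "description_binary",
                "description_html",
                "description_stripped",
            )
        )
    )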
79
apiserver/plane/db/management/commands/faker.py
Normal file
@@ -0,0 +1,79 @@
# Python imports
from typing import Any

# Django imports
from django.core.management.base import BaseCommand, CommandError

# Module imports
from plane.db.models import User, Workspace, WorkspaceMember


class Command(BaseCommand):
    help = "Create dummy issues, cycles, etc. for a project in a given workspace"

    def handle(self, *args: Any, **options: Any) -> str | None:
        try:
            workspace_name = input("Workspace Name: ")
            workspace_slug = input("Workspace slug: ")

            if workspace_slug == "":
                raise CommandError("Workspace slug is required")

            if Workspace.objects.filter(slug=workspace_slug).exists():
                raise CommandError("Workspace already exists")

            creator = input("Your email: ")

            if (
                creator == ""
                or not User.objects.filter(email=creator).exists()
            ):
                raise CommandError(
                    "User email is required and must belong to an existing user"
                )

            user = User.objects.get(email=creator)

            members = input("Enter Member emails (comma separated): ")
            members = members.split(",") if members != "" else []

            issue_count = int(
                input("Number of issues to be created: ")
            )
            cycle_count = int(
                input("Number of cycles to be created: ")
            )
            module_count = int(
                input("Number of modules to be created: ")
            )

            # Create workspace
            workspace = Workspace.objects.create(
                slug=workspace_slug,
                name=workspace_name,
                owner=user,
            )
            # Create workspace member
            WorkspaceMember.objects.create(
                workspace=workspace, role=20, member=user
            )

            from plane.bgtasks.create_faker import create_fake_data

            create_fake_data.delay(
                slug=workspace_slug,
                email=creator,
                members=members,
                issue_count=issue_count,
                cycle_count=cycle_count,
                module_count=module_count,
            )

            self.stdout.write(
                self.style.SUCCESS("Data is pushed to the queue")
            )
            return
        except Exception as e:
            self.stdout.write(
                self.style.ERROR(f"Command errored out: {str(e)}")
            )
            return
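Because the file lives under plane/db/management/commands/, Django picks it up by filename, so the command is run as below and then collects the workspace, creator, and counts interactively:

    python manage.py faker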
75
apiserver/plane/db/management/commands/license_check.py
Normal file
@@ -0,0 +1,75 @@
# Python imports
import os
import requests
from requests.exceptions import RequestException

# Django imports
from django.core.management.base import BaseCommand, CommandError


class Command(BaseCommand):
    help = "Check the license of the instance with Prime Server"

    def handle(self, *args, **options):
        try:
            # Verify the license key
            prime_host = os.environ.get("PRIME_HOST", False)
            machine_signature = os.environ.get("MACHINE_SIGNATURE", False)
            license_key = os.environ.get("LICENSE_KEY", False)
            deploy_platform = os.environ.get("DEPLOY_PLATFORM", False)
            domain = os.environ.get("LICENSE_DOMAIN", False)
            license_version = os.environ.get("LICENSE_VERSION", False)

            # The host, machine signature, and license key are required
            if not prime_host or not machine_signature or not license_key:
                raise CommandError("Invalid license key provided")

            # Check with the license server
            response = requests.post(
                f"{prime_host}/api/validate/",
                headers={
                    "Content-Type": "application/json",
                    "X-Api-Key": str(license_key),
                    "X-Machine-Signature": str(machine_signature),
                },
                json={
                    "machine_signature": str(machine_signature),
                    "domain": domain,
                },
            )

            # Check if status code is 204
            if response.status_code == 204:
                self.stdout.write(
                    self.style.SUCCESS("License key verified successfully")
                )
                return

            elif response.status_code == 400:
                if deploy_platform == "KUBERNETES":
                    response = requests.post(
                        f"{prime_host}/api/kubernetes-setup/",
                        headers={
                            "Content-Type": "application/json",
                            "X-Api-Key": str(license_key),
                            "X-Machine-Signature": str(machine_signature),
                        },
                        json={
                            "machine_signature": str(machine_signature),
                            "domain": domain,
                            "version": license_version,
                        },
                    )
                    response.raise_for_status()
                    self.stdout.write(
                        self.style.SUCCESS("Instance created successfully")
                    )

                    return
                else:
                    raise CommandError("Instance does not exist")
            else:
                raise CommandError("Invalid license key provided")

        except RequestException:
            raise CommandError("Could not verify the license key")
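This check is likewise a management command; an illustrative invocation (the environment values below are placeholders):

    PRIME_HOST=https://prime.example.com \
    LICENSE_KEY=... \
    MACHINE_SIGNATURE=... \
    python manage.py license_check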
@@ -87,7 +87,7 @@ from .workspace import (

from .importer import Importer

from .page import Page, PageLog, PageFavorite, PageLabel
from .page import Page, PageLog, PageFavorite, PageLabel, ProjectPage, TeamPage

from .estimate import Estimate, EstimatePoint

@@ -109,6 +109,6 @@ from .dashboard import Dashboard, DashboardWidget, Widget

from .favorite import UserFavorite

from .issue_type import IssueType
from .issue_type import IssueType, ProjectIssueType

from .recent_visit import UserRecentVisit

@@ -1,7 +1,7 @@
from django.conf import settings

# Django imports
from django.db import models
from django.conf import settings


# Module imports
from .workspace import WorkspaceBaseModel

@@ -7,7 +7,7 @@ from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
from django.db.models import Q

# Modeule imports
# Module imports
from plane.db.mixins import AuditModel

# Module imports

@@ -15,9 +15,15 @@ from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.utils import timezone
from django.conf import settings

# Third party imports
from sentry_sdk import capture_exception
from slack_sdk import WebClient
from slack_sdk.errors import SlackApiError

# Module imports
from ..mixins import TimeAuditModel
from plane.db.mixins import TimeAuditModel


def get_default_onboarding():
@@ -215,3 +221,23 @@ def create_user_notification(sender, instance, created, **kwargs):
            mention=False,
            issue_completed=False,
        )


@receiver(post_save, sender=User)
def send_welcome_slack(sender, instance, created, **kwargs):
    try:
        if created and not instance.is_bot:
            # Send message on slack as well
            if settings.SLACK_BOT_TOKEN:
                client = WebClient(token=settings.SLACK_BOT_TOKEN)
                try:
                    _ = client.chat_postMessage(
                        channel="#trackers",
                        text=f"New user {instance.email} has signed up and begun the onboarding journey.",
                    )
                except SlackApiError as e:
                    print(f"Got an error: {e.response['error']}")
        return
    except Exception as e:
        capture_exception(e)
        return

@@ -109,7 +109,6 @@ class IssueView(WorkspaceBaseModel):
    )
    is_locked = models.BooleanField(default=False)


    class Meta:
        verbose_name = "Issue View"
        verbose_name_plural = "Issue Views"

Some files were not shown because too many files have changed in this diff.