Mirror of https://github.com/makeplane/plane (synced 2025-08-07 19:59:33 +00:00)

Compare commits: `changelog-` ... `preview` (228 commits)

| SHA1 |
| --- |
| 9de5b1a009 |
| 21c59692f9 |
| 806619d73f |
| c5e5454265 |
| 3972b6639f |
| 51e146f8ca |
| 6450793d72 |
| dffe3a844f |
| 7ead606798 |
| fa150c2b47 |
| c3273b1a85 |
| 7cec92113f |
| 8cc513ba5e |
| 784b8da9be |
| 1d443310ce |
| cc49a2ca4f |
| ee53ee33d0 |
| 99f9337f35 |
| 1458c758a3 |
| 2d9988f584 |
| 27fa439c8d |
| 57935a94cc |
| 876ccce86b |
| a67dba45f8 |
| ef8e613358 |
| c4d2c5b1bb |
| d4705e1649 |
| 8228ecc087 |
| ce08c9193c |
| 69d5cd183f |
| 6e1734c86e |
| 05f56d04ba |
| 3f60f14bb1 |
| e0b0aafc2b |
| e0fa6553ae |
| 55f06cf546 |
| 84879ee3bd |
| b1162395ed |
| b93883fc14 |
| 4913c116d1 |
| 4044ce25ce |
| a5f3bd15b1 |
| 63d025cbf4 |
| 27f74206a3 |
| e20bfa55d6 |
| f8353d3468 |
| 57479f4554 |
| 514686d9d5 |
| 98a00f5bde |
| ed4ee3ad7e |
| 18e4c60b42 |
| 849b7b7bf3 |
| 2746bad86b |
| 008727d393 |
| 56e4d2c6f1 |
| d7e58a60fa |
| af81064961 |
| 3c6e2b4447 |
| b7be45d08a |
| d2629d723c |
| 9007ec3709 |
| 763a28ab60 |
| 5c22a6cecc |
| 4c3af7f8a1 |
| cc673a17a0 |
| 2377474823 |
| f3daac6f95 |
| 5660b28574 |
| d7d1545801 |
| 3ab1f0de84 |
| a75ae71ff0 |
| d5eb374217 |
| df4ea1f7ac |
| 07c80bb02c |
| 1ad792b4bb |
| 0eb4af9d19 |
| 7136b3129b |
| 48f1999c95 |
| 3783e34ae8 |
| e313aee3df |
| 6bb79df0eb |
| ec0ef98c1b |
| 9523c28c3e |
| 156ed329ac |
| ef0e3dca12 |
| d8f2c97810 |
| 89983b06d2 |
| 3224122df0 |
| 99127ff8e4 |
| da5390fa03 |
| ac22df3f88 |
| df762afaef |
| 2058f06b8a |
| bfd4bd5e75 |
| ac928f263a |
| 62065a6ebb |
| 67b62dcbe3 |
| a427367720 |
| c067eaa1ed |
| 2c70c1aaa8 |
| f90e553881 |
| 0af0e52275 |
| 7829e3adf5 |
| 47354f0e91 |
| db2f783d33 |
| 6d01622663 |
| dff2f8ae12 |
| 4c57ed4336 |
| c8dab1cc9c |
| 17c90a9d93 |
| a3714c8e3e |
| 71cd36865b |
| 4501e44702 |
| f661e06582 |
| 86f3ff1bd2 |
| fce4729f22 |
| bb48250467 |
| 5fbe411e04 |
| 2e75ff7f1c |
| eb4239417a |
| c6fbbfd8cc |
| 4d0a7e4658 |
| 1cb49cacae |
| 3618ffc2c3 |
| fcb6e269a0 |
| 853423608c |
| 9d88fc999c |
| dadd76b3ed |
| 6ce700fd5d |
| 0225d806cc |
| fd9da3164e |
| a4ec80ceca |
| 16332e0f6d |
| f40dda8fdc |
| 13ab0624d0 |
| 388588c588 |
| c0f986cf95 |
| ab79a5da10 |
| a2a62e2731 |
| e306a92adb |
| 0f0c4b5293 |
| 28375c46e5 |
| b909416c74 |
| 509db32267 |
| fdbe4c2ca6 |
| 6bee97eb26 |
| ba884d1e4d |
| f2a87e8f15 |
| 4f5272c8af |
| 805cfed1a3 |
| 1de95ef0d0 |
| 618fcf934f |
| 2f22ca0aea |
| b97d4c4def |
| 635d550d88 |
| 4333cfbd11 |
| 9789e176d2 |
| 59919d3874 |
| 75235f2ad5 |
| 6f27ec031d |
| 7d141f26ad |
| 71012d24ab |
| 7d417c5457 |
| 021b6eebdd |
| e184b646e6 |
| 549a0009e7 |
| 944b873184 |
| 6000639921 |
| 6ea24bfdcd |
| 8cc23bc4a5 |
| 7153064ebb |
| 5874636b0b |
| f679628365 |
| ba6b822f60 |
| 757019bf43 |
| 295eb1ef72 |
| 1fcffad7dd |
| 0b159c4963 |
| cfe169c6d7 |
| fa9c63716c |
| d3f1b511ad |
| e624a17868 |
| 670da9fa03 |
| b03844ee2d |
| f36d0691fe |
| e7d888d817 |
| 2f6923fca0 |
| 912246c592 |
| 4a065e14d0 |
| e09aeab5b8 |
| 25a6cd49fc |
| b5538565c7 |
| b8043f92b1 |
| 0e91feacc3 |
| 40c0922726 |
| dee8f00a71 |
| 072f2e2cac |
| 79c2dfd293 |
| 22a9d48ca3 |
| fbcc8fc8a0 |
| c1fa372c84 |
| 7045a1f2af |
| f26b4d3d06 |
| c3c1aef7a9 |
| 24e57009af |
| 2b7a17b484 |
| 64fd0b2830 |
| 8988cf9a85 |
| eb5ffebcc6 |
| 414010688d |
| 171099667e |
| d65f0e264e |
| c7d17d00b7 |
| 9cdfb2224a |
| 8129f5f969 |
| 89b8cdbe6e |
| 53e6a62a12 |
| 59a682f6d9 |
| 6fd448ddac |
| 75f89c4c12 |
| 91c4047afd |
| 0983e5f44d |
| 2014400bed |
| dffcc6dc10 |
| 640b23fb1b |
| e13d8aa4b3 |
| cf595de7c7 |
| 0fa9c8b015 |

.env.example (16 changed lines)

```
@@ -15,12 +15,15 @@ RABBITMQ_USER="plane"
RABBITMQ_PASSWORD="plane"
RABBITMQ_VHOST="plane"

LISTEN_HTTP_PORT=80
LISTEN_HTTPS_PORT=443

# AWS Settings
AWS_REGION=""
AWS_ACCESS_KEY_ID="access-key"
AWS_SECRET_ACCESS_KEY="secret-key"
AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
# Changing this requires change in the nginx.conf for uploads if using minio setup
# Changing this requires change in the proxy config for uploads if using minio setup
AWS_S3_BUCKET_NAME="uploads"
# Maximum file upload limit
FILE_SIZE_LIMIT=5242880
@@ -36,8 +39,15 @@ DOCKERIZED=1 # deprecated
# set to 1 If using the pre-configured minio setup
USE_MINIO=1

# Nginx Configuration
NGINX_PORT=80
# If SSL Cert to be generated, set CERT_EMAIl="email <EMAIL_ADDRESS>"
CERT_ACME_CA=https://acme-v02.api.letsencrypt.org/directory
TRUSTED_PROXIES=0.0.0.0/0
SITE_ADDRESS=:80
CERT_EMAIL=

# For DNS Challenge based certificate generation, set the CERT_ACME_DNS, CERT_EMAIL
# CERT_ACME_DNS="acme_dns <CERT_DNS_PROVIDER> <CERT_DNS_PROVIDER_API_KEY>"
CERT_ACME_DNS=

# Force HTTPS for handling SSL Termination
MINIO_ENDPOINT_SSL=0
```
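
For orientation, a filled-in version of the new certificate settings could look like the sketch below. The domain, email, and DNS provider are placeholders for illustration; which DNS providers are actually supported depends on the proxy image, so treat the provider name as an assumption rather than a value from this changelog.

```
SITE_ADDRESS=plane.example.com
CERT_EMAIL=admin@example.com
# DNS-01 challenge; "cloudflare" is only an example provider
CERT_ACME_DNS="acme_dns cloudflare <CERT_DNS_PROVIDER_API_KEY>"
```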

.github/workflows/build-aio-base.yml (vendored, 139 changed lines)
@@ -1,139 +0,0 @@
|
||||
name: Build AIO Base Image
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
base_tag_name:
|
||||
description: 'Base Tag Name'
|
||||
required: false
|
||||
default: ''
|
||||
|
||||
env:
|
||||
TARGET_BRANCH: ${{ github.ref_name }}
|
||||
|
||||
jobs:
|
||||
base_build_setup:
|
||||
name: Build Preparation
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
|
||||
gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
|
||||
gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
|
||||
gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
|
||||
gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
|
||||
image_tag: ${{ steps.set_env_variables.outputs.IMAGE_TAG }}
|
||||
|
||||
steps:
|
||||
- id: set_env_variables
|
||||
name: Set Environment Variables
|
||||
run: |
|
||||
echo "TARGET_BRANCH=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT
|
||||
|
||||
if [ "${{ github.event.inputs.base_tag_name }}" != "" ]; then
|
||||
echo "IMAGE_TAG=${{ github.event.inputs.base_tag_name }}" >> $GITHUB_OUTPUT
|
||||
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
|
||||
echo "IMAGE_TAG=latest" >> $GITHUB_OUTPUT
|
||||
elif [ "${{ env.TARGET_BRANCH }}" == "preview" ]; then
|
||||
echo "IMAGE_TAG=preview" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "IMAGE_TAG=develop" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
|
||||
if [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
|
||||
echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
|
||||
echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
|
||||
echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
|
||||
echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
|
||||
echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
|
||||
echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
|
||||
echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- id: checkout_files
|
||||
name: Checkout Files
|
||||
uses: actions/checkout@v4
|
||||
|
||||
full_base_build_push:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [base_build_setup]
|
||||
env:
|
||||
BASE_IMG_TAG: makeplane/plane-aio-base:full-${{ needs.base_build_setup.outputs.image_tag }}
|
||||
BUILDX_DRIVER: ${{ needs.base_build_setup.outputs.gh_buildx_driver }}
|
||||
BUILDX_VERSION: ${{ needs.base_build_setup.outputs.gh_buildx_version }}
|
||||
BUILDX_PLATFORMS: ${{ needs.base_build_setup.outputs.gh_buildx_platforms }}
|
||||
BUILDX_ENDPOINT: ${{ needs.base_build_setup.outputs.gh_buildx_endpoint }}
|
||||
steps:
|
||||
- name: Check out the repo
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
with:
|
||||
driver: ${{ env.BUILDX_DRIVER }}
|
||||
version: ${{ env.BUILDX_VERSION }}
|
||||
endpoint: ${{ env.BUILDX_ENDPOINT }}
|
||||
|
||||
- name: Build and Push to Docker Hub
|
||||
uses: docker/build-push-action@v6.9.0
|
||||
with:
|
||||
context: ./aio
|
||||
file: ./aio/Dockerfile-base-full
|
||||
platforms: ${{ env.BUILDX_PLATFORMS }}
|
||||
tags: ${{ env.BASE_IMG_TAG }}
|
||||
push: true
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
env:
|
||||
DOCKER_BUILDKIT: 1
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
slim_base_build_push:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [base_build_setup]
|
||||
env:
|
||||
BASE_IMG_TAG: makeplane/plane-aio-base:slim-${{ needs.base_build_setup.outputs.image_tag }}
|
||||
BUILDX_DRIVER: ${{ needs.base_build_setup.outputs.gh_buildx_driver }}
|
||||
BUILDX_VERSION: ${{ needs.base_build_setup.outputs.gh_buildx_version }}
|
||||
BUILDX_PLATFORMS: ${{ needs.base_build_setup.outputs.gh_buildx_platforms }}
|
||||
BUILDX_ENDPOINT: ${{ needs.base_build_setup.outputs.gh_buildx_endpoint }}
|
||||
steps:
|
||||
- name: Check out the repo
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
with:
|
||||
driver: ${{ env.BUILDX_DRIVER }}
|
||||
version: ${{ env.BUILDX_VERSION }}
|
||||
endpoint: ${{ env.BUILDX_ENDPOINT }}
|
||||
|
||||
- name: Build and Push to Docker Hub
|
||||
uses: docker/build-push-action@v6.9.0
|
||||
with:
|
||||
context: ./aio
|
||||
file: ./aio/Dockerfile-base-slim
|
||||
platforms: ${{ env.BUILDX_PLATFORMS }}
|
||||
tags: ${{ env.BASE_IMG_TAG }}
|
||||
push: true
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
env:
|
||||
DOCKER_BUILDKIT: 1
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||

.github/workflows/build-aio-branch.yml (vendored, 207 changed lines)
@@ -1,207 +0,0 @@
|
||||
name: Branch Build AIO
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
full:
|
||||
description: 'Run full build'
|
||||
type: boolean
|
||||
required: false
|
||||
default: false
|
||||
slim:
|
||||
description: 'Run slim build'
|
||||
type: boolean
|
||||
required: false
|
||||
default: false
|
||||
base_tag_name:
|
||||
description: 'Base Tag Name'
|
||||
required: false
|
||||
default: ''
|
||||
release:
|
||||
types: [released, prereleased]
|
||||
|
||||
env:
|
||||
TARGET_BRANCH: ${{ github.ref_name || github.event.release.target_commitish }}
|
||||
FULL_BUILD_INPUT: ${{ github.event.inputs.full }}
|
||||
SLIM_BUILD_INPUT: ${{ github.event.inputs.slim }}
|
||||
|
||||
jobs:
|
||||
branch_build_setup:
|
||||
name: Build Setup
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
|
||||
flat_branch_name: ${{ steps.set_env_variables.outputs.FLAT_BRANCH_NAME }}
|
||||
gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
|
||||
gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
|
||||
gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
|
||||
gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
|
||||
aio_base_tag: ${{ steps.set_env_variables.outputs.AIO_BASE_TAG }}
|
||||
do_full_build: ${{ steps.set_env_variables.outputs.DO_FULL_BUILD }}
|
||||
do_slim_build: ${{ steps.set_env_variables.outputs.DO_SLIM_BUILD }}
|
||||
|
||||
steps:
|
||||
- id: set_env_variables
|
||||
name: Set Environment Variables
|
||||
run: |
|
||||
if [ "${{ env.TARGET_BRANCH }}" == "master" ] || [ "${{ github.event_name }}" == "release" ]; then
|
||||
echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
|
||||
echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
|
||||
echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
|
||||
echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT
|
||||
|
||||
echo "AIO_BASE_TAG=latest" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
|
||||
echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
|
||||
echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
|
||||
echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
|
||||
|
||||
if [ "${{ github.event_name}}" == "workflow_dispatch" ] && [ "${{ github.event.inputs.base_tag_name }}" != "" ]; then
|
||||
echo "AIO_BASE_TAG=${{ github.event.inputs.base_tag_name }}" >> $GITHUB_OUTPUT
|
||||
elif [ "${{ env.TARGET_BRANCH }}" == "preview" ]; then
|
||||
echo "AIO_BASE_TAG=preview" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "AIO_BASE_TAG=develop" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
fi
|
||||
echo "TARGET_BRANCH=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT
|
||||
|
||||
if [ "${{ env.FULL_BUILD_INPUT }}" == "true" ] || [ "${{github.event_name}}" == "push" ] || [ "${{github.event_name}}" == "release" ]; then
|
||||
echo "DO_FULL_BUILD=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "DO_FULL_BUILD=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [ "${{ env.SLIM_BUILD_INPUT }}" == "true" ] || [ "${{github.event_name}}" == "push" ] || [ "${{github.event_name}}" == "release" ]; then
|
||||
echo "DO_SLIM_BUILD=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "DO_SLIM_BUILD=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
FLAT_BRANCH_NAME=$(echo "${{ env.TARGET_BRANCH }}" | sed 's/[^a-zA-Z0-9]/-/g')
|
||||
echo "FLAT_BRANCH_NAME=$FLAT_BRANCH_NAME" >> $GITHUB_OUTPUT
|
||||
|
||||
- id: checkout_files
|
||||
name: Checkout Files
|
||||
uses: actions/checkout@v4
|
||||
|
||||
full_build_push:
|
||||
if: ${{ needs.branch_build_setup.outputs.do_full_build == 'true' }}
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [branch_build_setup]
|
||||
env:
|
||||
BUILD_TYPE: full
|
||||
AIO_BASE_TAG: ${{ needs.branch_build_setup.outputs.aio_base_tag }}
|
||||
AIO_IMAGE_TAGS: makeplane/plane-aio:full-${{ needs.branch_build_setup.outputs.flat_branch_name }}
|
||||
TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
|
||||
BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
|
||||
BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
|
||||
BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
|
||||
BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
|
||||
steps:
|
||||
- name: Set Docker Tag
|
||||
run: |
|
||||
if [ "${{ github.event_name }}" == "release" ]; then
|
||||
TAG=makeplane/plane-aio:${{env.BUILD_TYPE}}-stable,makeplane/plane-aio:${{env.BUILD_TYPE}}-${{ github.event.release.tag_name }}
|
||||
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
|
||||
TAG=makeplane/plane-aio:${{env.BUILD_TYPE}}-latest
|
||||
else
|
||||
TAG=${{ env.AIO_IMAGE_TAGS }}
|
||||
fi
|
||||
echo "AIO_IMAGE_TAGS=${TAG}" >> $GITHUB_ENV
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
with:
|
||||
driver: ${{ env.BUILDX_DRIVER }}
|
||||
version: ${{ env.BUILDX_VERSION }}
|
||||
endpoint: ${{ env.BUILDX_ENDPOINT }}
|
||||
|
||||
- name: Check out the repo
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Build and Push to Docker Hub
|
||||
uses: docker/build-push-action@v6.9.0
|
||||
with:
|
||||
context: .
|
||||
file: ./aio/Dockerfile-app
|
||||
platforms: ${{ env.BUILDX_PLATFORMS }}
|
||||
tags: ${{ env.AIO_IMAGE_TAGS }}
|
||||
push: true
|
||||
build-args: |
|
||||
BASE_TAG=${{ env.AIO_BASE_TAG }}
|
||||
BUILD_TYPE=${{env.BUILD_TYPE}}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
env:
|
||||
DOCKER_BUILDKIT: 1
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
slim_build_push:
|
||||
if: ${{ needs.branch_build_setup.outputs.do_slim_build == 'true' }}
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [branch_build_setup]
|
||||
env:
|
||||
BUILD_TYPE: slim
|
||||
AIO_BASE_TAG: ${{ needs.branch_build_setup.outputs.aio_base_tag }}
|
||||
AIO_IMAGE_TAGS: makeplane/plane-aio:slim-${{ needs.branch_build_setup.outputs.flat_branch_name }}
|
||||
TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
|
||||
BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
|
||||
BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
|
||||
BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
|
||||
BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
|
||||
steps:
|
||||
- name: Set Docker Tag
|
||||
run: |
|
||||
if [ "${{ github.event_name }}" == "release" ]; then
|
||||
TAG=makeplane/plane-aio:${{env.BUILD_TYPE}}-stable,makeplane/plane-aio:${{env.BUILD_TYPE}}-${{ github.event.release.tag_name }}
|
||||
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
|
||||
TAG=makeplane/plane-aio:${{env.BUILD_TYPE}}-latest
|
||||
else
|
||||
TAG=${{ env.AIO_IMAGE_TAGS }}
|
||||
fi
|
||||
echo "AIO_IMAGE_TAGS=${TAG}" >> $GITHUB_ENV
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
with:
|
||||
driver: ${{ env.BUILDX_DRIVER }}
|
||||
version: ${{ env.BUILDX_VERSION }}
|
||||
endpoint: ${{ env.BUILDX_ENDPOINT }}
|
||||
|
||||
- name: Check out the repo
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Build and Push to Docker Hub
|
||||
uses: docker/build-push-action@v6.9.0
|
||||
with:
|
||||
context: .
|
||||
file: ./aio/Dockerfile-app
|
||||
platforms: ${{ env.BUILDX_PLATFORMS }}
|
||||
tags: ${{ env.AIO_IMAGE_TAGS }}
|
||||
push: true
|
||||
build-args: |
|
||||
BASE_TAG=${{ env.AIO_BASE_TAG }}
|
||||
BUILD_TYPE=${{env.BUILD_TYPE}}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
env:
|
||||
DOCKER_BUILDKIT: 1
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||

.github/workflows/build-branch.yml (vendored, 143 changed lines)
@@ -25,6 +25,11 @@ on:
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
aio_build:
|
||||
description: "Build for AIO docker image"
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
push:
|
||||
branches:
|
||||
- preview
|
||||
@@ -36,6 +41,7 @@ env:
|
||||
BUILD_TYPE: ${{ github.event.inputs.build_type }}
|
||||
RELEASE_VERSION: ${{ github.event.inputs.releaseVersion }}
|
||||
IS_PRERELEASE: ${{ github.event.inputs.isPrerelease }}
|
||||
AIO_BUILD: ${{ github.event.inputs.aio_build }}
|
||||
|
||||
jobs:
|
||||
branch_build_setup:
|
||||
@@ -54,11 +60,13 @@ jobs:
|
||||
dh_img_live: ${{ steps.set_env_variables.outputs.DH_IMG_LIVE }}
|
||||
dh_img_backend: ${{ steps.set_env_variables.outputs.DH_IMG_BACKEND }}
|
||||
dh_img_proxy: ${{ steps.set_env_variables.outputs.DH_IMG_PROXY }}
|
||||
dh_img_aio: ${{ steps.set_env_variables.outputs.DH_IMG_AIO }}
|
||||
|
||||
build_type: ${{steps.set_env_variables.outputs.BUILD_TYPE}}
|
||||
build_release: ${{ steps.set_env_variables.outputs.BUILD_RELEASE }}
|
||||
build_prerelease: ${{ steps.set_env_variables.outputs.BUILD_PRERELEASE }}
|
||||
release_version: ${{ steps.set_env_variables.outputs.RELEASE_VERSION }}
|
||||
aio_build: ${{ steps.set_env_variables.outputs.AIO_BUILD }}
|
||||
|
||||
steps:
|
||||
- id: set_env_variables
|
||||
@@ -84,12 +92,15 @@ jobs:
|
||||
echo "DH_IMG_LIVE=plane-live" >> $GITHUB_OUTPUT
|
||||
echo "DH_IMG_BACKEND=plane-backend" >> $GITHUB_OUTPUT
|
||||
echo "DH_IMG_PROXY=plane-proxy" >> $GITHUB_OUTPUT
|
||||
echo "DH_IMG_AIO=plane-aio-community" >> $GITHUB_OUTPUT
|
||||
|
||||
echo "BUILD_TYPE=${{env.BUILD_TYPE}}" >> $GITHUB_OUTPUT
|
||||
BUILD_RELEASE=false
|
||||
BUILD_PRERELEASE=false
|
||||
RELVERSION="latest"
|
||||
|
||||
BUILD_AIO=${{ env.AIO_BUILD }}
|
||||
|
||||
if [ "${{ env.BUILD_TYPE }}" == "Release" ]; then
|
||||
FLAT_RELEASE_VERSION=$(echo "${{ env.RELEASE_VERSION }}" | sed 's/[^a-zA-Z0-9.-]//g')
|
||||
echo "FLAT_RELEASE_VERSION=${FLAT_RELEASE_VERSION}" >> $GITHUB_OUTPUT
|
||||
@@ -108,10 +119,14 @@ jobs:
|
||||
if [ "${{ env.IS_PRERELEASE }}" == "true" ]; then
|
||||
BUILD_PRERELEASE=true
|
||||
fi
|
||||
|
||||
BUILD_AIO=true
|
||||
fi
|
||||
|
||||
echo "BUILD_RELEASE=${BUILD_RELEASE}" >> $GITHUB_OUTPUT
|
||||
echo "BUILD_PRERELEASE=${BUILD_PRERELEASE}" >> $GITHUB_OUTPUT
|
||||
echo "RELEASE_VERSION=${RELVERSION}" >> $GITHUB_OUTPUT
|
||||
echo "AIO_BUILD=${BUILD_AIO}" >> $GITHUB_OUTPUT
|
||||
|
||||
- id: checkout_files
|
||||
name: Checkout Files
|
||||
@@ -133,7 +148,7 @@ jobs:
|
||||
docker-image-owner: makeplane
|
||||
docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_admin }}
|
||||
build-context: .
|
||||
dockerfile-path: ./admin/Dockerfile.admin
|
||||
dockerfile-path: ./apps/admin/Dockerfile.admin
|
||||
buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
|
||||
buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
|
||||
buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
|
||||
@@ -155,7 +170,7 @@ jobs:
|
||||
docker-image-owner: makeplane
|
||||
docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_web }}
|
||||
build-context: .
|
||||
dockerfile-path: ./web/Dockerfile.web
|
||||
dockerfile-path: ./apps/web/Dockerfile.web
|
||||
buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
|
||||
buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
|
||||
buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
|
||||
@@ -177,7 +192,7 @@ jobs:
|
||||
docker-image-owner: makeplane
|
||||
docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_space }}
|
||||
build-context: .
|
||||
dockerfile-path: ./space/Dockerfile.space
|
||||
dockerfile-path: ./apps/space/Dockerfile.space
|
||||
buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
|
||||
buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
|
||||
buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
|
||||
@@ -199,13 +214,13 @@ jobs:
|
||||
docker-image-owner: makeplane
|
||||
docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_live }}
|
||||
build-context: .
|
||||
dockerfile-path: ./live/Dockerfile.live
|
||||
dockerfile-path: ./apps/live/Dockerfile.live
|
||||
buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
|
||||
buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
|
||||
buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
|
||||
buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
|
||||
|
||||
branch_build_push_apiserver:
|
||||
branch_build_push_api:
|
||||
name: Build-Push API Server Docker Image
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [branch_build_setup]
|
||||
@@ -220,8 +235,8 @@ jobs:
|
||||
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
docker-image-owner: makeplane
|
||||
docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_backend }}
|
||||
build-context: ./apiserver
|
||||
dockerfile-path: ./apiserver/Dockerfile.api
|
||||
build-context: ./apps/api
|
||||
dockerfile-path: ./apps/api/Dockerfile.api
|
||||
buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
|
||||
buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
|
||||
buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
|
||||
@@ -242,13 +257,102 @@ jobs:
|
||||
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
docker-image-owner: makeplane
|
||||
docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_proxy }}
|
||||
build-context: ./nginx
|
||||
dockerfile-path: ./nginx/Dockerfile
|
||||
build-context: ./apps/proxy
|
||||
dockerfile-path: ./apps/proxy/Dockerfile.ce
|
||||
buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
|
||||
buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
|
||||
buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
|
||||
buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
|
||||
|
||||
branch_build_push_aio:
|
||||
if: ${{ needs.branch_build_setup.outputs.aio_build == 'true' }}
|
||||
name: Build-Push AIO Docker Image
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [
|
||||
branch_build_setup,
|
||||
branch_build_push_admin,
|
||||
branch_build_push_web,
|
||||
branch_build_push_space,
|
||||
branch_build_push_live,
|
||||
branch_build_push_api,
|
||||
branch_build_push_proxy
|
||||
]
|
||||
steps:
|
||||
- name: Checkout Files
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Prepare AIO Assets
|
||||
id: prepare_aio_assets
|
||||
run: |
|
||||
cd deployments/aio/community
|
||||
|
||||
if [ "${{ needs.branch_build_setup.outputs.build_type }}" == "Release" ]; then
|
||||
aio_version=${{ needs.branch_build_setup.outputs.release_version }}
|
||||
else
|
||||
aio_version=${{ needs.branch_build_setup.outputs.gh_branch_name }}
|
||||
fi
|
||||
bash ./build.sh --release $aio_version
|
||||
echo "AIO_BUILD_VERSION=${aio_version}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Upload AIO Assets
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
path: ./deployments/aio/community/dist
|
||||
name: aio-assets-dist
|
||||
|
||||
- name: AIO Build and Push
|
||||
uses: makeplane/actions/build-push@v1.1.0
|
||||
with:
|
||||
build-release: ${{ needs.branch_build_setup.outputs.build_release }}
|
||||
build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
|
||||
release-version: ${{ needs.branch_build_setup.outputs.release_version }}
|
||||
dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
docker-image-owner: makeplane
|
||||
docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_aio }}
|
||||
build-context: ./deployments/aio/community
|
||||
dockerfile-path: ./deployments/aio/community/Dockerfile
|
||||
buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
|
||||
buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
|
||||
buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
|
||||
buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
|
||||
additional-assets: aio-assets-dist
|
||||
additional-assets-dir: ./deployments/aio/community/dist
|
||||
build-args: |
|
||||
PLANE_VERSION=${{ steps.prepare_aio_assets.outputs.AIO_BUILD_VERSION }}
|
||||
|
||||
upload_build_assets:
|
||||
name: Upload Build Assets
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [branch_build_setup, branch_build_push_admin, branch_build_push_web, branch_build_push_space, branch_build_push_live, branch_build_push_api, branch_build_push_proxy]
|
||||
steps:
|
||||
- name: Checkout Files
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Update Assets
|
||||
run: |
|
||||
if [ "${{ needs.branch_build_setup.outputs.build_type }}" == "Release" ]; then
|
||||
REL_VERSION=${{ needs.branch_build_setup.outputs.release_version }}
|
||||
else
|
||||
REL_VERSION=${{ needs.branch_build_setup.outputs.gh_branch_name }}
|
||||
fi
|
||||
|
||||
cp ./deployments/cli/community/install.sh deployments/cli/community/setup.sh
|
||||
sed -i 's/${APP_RELEASE:-stable}/${APP_RELEASE:-'${REL_VERSION}'}/g' deployments/cli/community/docker-compose.yml
|
||||
# sed -i 's/APP_RELEASE=stable/APP_RELEASE='${REL_VERSION}'/g' deployments/cli/community/variables.env
|
||||
|
||||
- name: Upload Assets
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: community-assets
|
||||
path: |
|
||||
./deployments/cli/community/setup.sh
|
||||
./deployments/cli/community/restore.sh
|
||||
./deployments/cli/community/restore-airgapped.sh
|
||||
./deployments/cli/community/docker-compose.yml
|
||||
./deployments/cli/community/variables.env
|
||||
./deployments/swarm/community/swarm.sh
|
||||
|
||||
publish_release:
|
||||
if: ${{ needs.branch_build_setup.outputs.build_type == 'Release' }}
|
||||
name: Build Release
|
||||
@@ -260,7 +364,7 @@ jobs:
|
||||
branch_build_push_web,
|
||||
branch_build_push_space,
|
||||
branch_build_push_live,
|
||||
branch_build_push_apiserver,
|
||||
branch_build_push_api,
|
||||
branch_build_push_proxy,
|
||||
]
|
||||
env:
|
||||
@@ -271,9 +375,9 @@ jobs:
|
||||
|
||||
- name: Update Assets
|
||||
run: |
|
||||
cp ./deploy/selfhost/install.sh deploy/selfhost/setup.sh
|
||||
sed -i 's/${APP_RELEASE:-stable}/${APP_RELEASE:-'${REL_VERSION}'}/g' deploy/selfhost/docker-compose.yml
|
||||
# sed -i 's/APP_RELEASE=stable/APP_RELEASE='${REL_VERSION}'/g' deploy/selfhost/variables.env
|
||||
cp ./deployments/cli/community/install.sh deployments/cli/community/setup.sh
|
||||
sed -i 's/${APP_RELEASE:-stable}/${APP_RELEASE:-'${REL_VERSION}'}/g' deployments/cli/community/docker-compose.yml
|
||||
# sed -i 's/APP_RELEASE=stable/APP_RELEASE='${REL_VERSION}'/g' deployments/cli/community/variables.env
|
||||
|
||||
- name: Create Release
|
||||
id: create_release
|
||||
@@ -287,9 +391,10 @@ jobs:
|
||||
prerelease: ${{ env.IS_PRERELEASE }}
|
||||
generate_release_notes: true
|
||||
files: |
|
||||
${{ github.workspace }}/deploy/selfhost/setup.sh
|
||||
${{ github.workspace }}/deploy/selfhost/swarm.sh
|
||||
${{ github.workspace }}/deploy/selfhost/restore.sh
|
||||
${{ github.workspace }}/deploy/selfhost/restore-airgapped.sh
|
||||
${{ github.workspace }}/deploy/selfhost/docker-compose.yml
|
||||
${{ github.workspace }}/deploy/selfhost/variables.env
|
||||
${{ github.workspace }}/deployments/cli/community/setup.sh
|
||||
${{ github.workspace }}/deployments/cli/community/restore.sh
|
||||
${{ github.workspace }}/deployments/cli/community/restore-airgapped.sh
|
||||
${{ github.workspace }}/deployments/cli/community/docker-compose.yml
|
||||
${{ github.workspace }}/deployments/cli/community/variables.env
|
||||
${{ github.workspace }}/deployments/swarm/community/swarm.sh
|
||||
|
||||
|
||||

.github/workflows/build-test-pull-request.yml (vendored, 95 changed lines)
@@ -1,95 +0,0 @@
|
||||
name: Build and Lint on Pull Request
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
types: ["opened", "synchronize", "ready_for_review"]
|
||||
|
||||
jobs:
|
||||
lint-apiserver:
|
||||
if: github.event.pull_request.draft == false
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.x" # Specify the Python version you need
|
||||
- name: Install Pylint
|
||||
run: python -m pip install ruff
|
||||
- name: Install Apiserver Dependencies
|
||||
run: cd apiserver && pip install -r requirements.txt
|
||||
- name: Lint apiserver
|
||||
run: ruff check --fix apiserver
|
||||
|
||||
lint-admin:
|
||||
if: github.event.pull_request.draft == false
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20.x
|
||||
- run: yarn install
|
||||
- run: yarn lint --filter=admin
|
||||
|
||||
lint-space:
|
||||
if: github.event.pull_request.draft == false
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20.x
|
||||
- run: yarn install
|
||||
- run: yarn lint --filter=space
|
||||
|
||||
lint-web:
|
||||
if: github.event.pull_request.draft == false
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20.x
|
||||
- run: yarn install
|
||||
- run: yarn lint --filter=web
|
||||
|
||||
build-admin:
|
||||
needs: lint-admin
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20.x
|
||||
- run: yarn install
|
||||
- run: yarn build --filter=admin
|
||||
|
||||
build-space:
|
||||
needs: lint-space
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20.x
|
||||
- run: yarn install
|
||||
- run: yarn build --filter=space
|
||||
|
||||
build-web:
|
||||
needs: lint-web
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20.x
|
||||
- run: yarn install
|
||||
- run: yarn build --filter=web
|
||||

.github/workflows/pull-request-build-lint-api.yml (new file, vendored, 30 lines)

```yaml
@@ -0,0 +1,30 @@
name: Build and lint API

on:
  workflow_dispatch:
  pull_request:
    branches: ["preview"]
    types: ["opened", "synchronize", "ready_for_review", "review_requested", "reopened"]
    paths:
      - "apps/api/**"

jobs:
  lint-api:
    name: Lint API
    runs-on: ubuntu-latest
    timeout-minutes: 25
    if: |
      github.event.pull_request.draft == false &&
      github.event.pull_request.requested_reviewers != null
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.x"
      - name: Install Pylint
        run: python -m pip install ruff
      - name: Install API Dependencies
        run: cd apps/api && pip install -r requirements.txt
      - name: Lint apps/api
        run: ruff check --fix apps/api
```
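
The same check can be reproduced locally before opening a pull request by reusing the commands from this workflow (assuming a Python 3 environment and the repository root as the working directory):

```
python -m pip install ruff
cd apps/api && pip install -r requirements.txt
ruff check --fix apps/api
```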

.github/workflows/pull-request-build-lint-web-apps.yml (new file, vendored, 43 lines)

```yaml
@@ -0,0 +1,43 @@
name: Build and lint web apps

on:
  workflow_dispatch:
  pull_request:
    branches: ["preview"]
    types: ["opened", "synchronize", "ready_for_review", "review_requested", "reopened"]
    paths:
      - "**.tsx?"
      - "**.jsx?"
      - "**.css"
      - "**.json"
      - "!apps/api/**"

jobs:
  build-and-lint:
    name: Build and lint web apps
    runs-on: ubuntu-latest
    timeout-minutes: 25
    if: |
      github.event.pull_request.draft == false &&
      github.event.pull_request.requested_reviewers != null
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 2

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version-file: ".nvmrc"

      - name: Install dependencies
        run: yarn install --frozen-lockfile

      - name: Build web apps
        run: yarn run build

      - name: Lint web apps
        run: yarn run ci:lint
```

CONTRIBUTING.md

````markdown
@@ -25,6 +25,7 @@ When opening a new issue, please use a clear and concise title that follows this
- For documentation: `📘 Docs: [short description]`

**Examples:**

- `🐛 Bug: API token expiry time not saving correctly`
- `📘 Docs: Clarify RAM requirement for local setup`
- `🚀 Feature: Allow custom time selection for token expiration`
@@ -47,7 +48,7 @@ This helps us triage and manage issues more efficiently.

The project is a monorepo, with backend api and frontend in a single repo.

The backend is a django project which is kept inside apiserver
The backend is a django project which is kept inside apps/api

1. Clone the repo

@@ -105,11 +106,13 @@ To ensure consistency throughout the source code, please keep these rules in min
- **Improve documentation** - fix incomplete or missing [docs](https://docs.plane.so/), bad wording, examples or explanations.

## Contributing to language support

This guide is designed to help contributors understand how to add or update translations in the application.

### Understanding translation structure

#### File organization

Translations are organized by language in the locales directory. Each language has its own folder containing JSON files for translations. Here's how it looks:

```
@@ -122,7 +125,9 @@ packages/i18n/src/locales/
└── [language]/
    └── translations.json
```

#### Nested structure

To keep translations organized, we use a nested structure for keys. This makes it easier to manage and locate specific translations. For example:

```json
@@ -137,32 +142,37 @@ To keep translations organized, we use a nested structure for keys. This makes i
```

### Translation formatting guide

We use [IntlMessageFormat](https://formatjs.github.io/docs/intl-messageformat/) to handle dynamic content, such as variables and pluralization. Here's how to format your translations:

#### Examples

- **Simple variables**
  ```json
  {

  ```json
  {
    "greeting": "Hello, {name}!"
  }
  ```
  }
  ```

- **Pluralization**
  ```json
  {
  ```json
  {
    "items": "{count, plural, one {Work item} other {Work items}}"
  }
  ```
  }
  ```

````
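To see how such an ICU message behaves at runtime, here is a minimal sketch using the `intl-messageformat` package that the guide links to; it is illustrative only and not code from this repository.

```ts
import { IntlMessageFormat } from "intl-messageformat";

// The pluralized message from the contributing guide above.
const items = new IntlMessageFormat(
  "{count, plural, one {Work item} other {Work items}}",
  "en"
);

console.log(items.format({ count: 1 })); // "Work item"
console.log(items.format({ count: 3 })); // "Work items"
```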

````markdown
### Contributing guidelines

#### Updating existing translations

1. Locate the key in `locales/<language>/translations.json`.

2. Update the value while ensuring the key structure remains intact.
3. Preserve any existing ICU formats (e.g., variables, pluralization).

#### Adding new translation keys

1. When introducing a new key, ensure it is added to **all** language files, even if translations are not immediately available. Use English as a placeholder if needed.

2. Keep the nesting structure consistent across all languages.
@@ -170,48 +180,48 @@ We use [IntlMessageFormat](https://formatjs.github.io/docs/intl-messageformat/)
3. If the new key requires dynamic content (e.g., variables or pluralization), ensure the ICU format is applied uniformly across all languages.

### Adding new languages

Adding a new language involves several steps to ensure it integrates seamlessly with the project. Follow these instructions carefully:

1. **Update type definitions**
   Add the new language to the TLanguage type in the language definitions file:
1. **Update type definitions**
   Add the new language to the TLanguage type in the language definitions file:

```typescript
// types/language.ts
export type TLanguage = "en" | "fr" | "your-lang";
```
```ts
// packages/i18n/src/types/language.ts
export type TLanguage = "en" | "fr" | "your-lang";
```

2. **Add language configuration**
   Include the new language in the list of supported languages:
1. **Add language configuration**
   Include the new language in the list of supported languages:
```ts
// packages/i18n/src/constants/language.ts
export const SUPPORTED_LANGUAGES: ILanguageOption[] = [
  { label: "English", value: "en" },
  { label: "Your Language", value: "your-lang" }
];
```

```typescript
// constants/language.ts
export const SUPPORTED_LANGUAGES: ILanguageOption[] = [
  { label: "English", value: "en" },
  { label: "Your Language", value: "your-lang" }
];
```

3. **Create translation files**
   1. Create a new folder for your language under locales (e.g., `locales/your-lang/`).
2. **Create translation files**
   1. Create a new folder for your language under locales (e.g., `locales/your-lang/`).

   2. Add a `translations.json` file inside the folder.

   3. Copy the structure from an existing translation file and translate all keys.

4. **Update import logic**
   Modify the language import logic to include your new language:

```typescript
private importLanguageFile(language: TLanguage): Promise<any> {
  switch (language) {
    case "your-lang":
      return import("../locales/your-lang/translations.json");
    // ...
  }
}
```
3. **Update import logic**
   Modify the language import logic to include your new language:
```ts
private importLanguageFile(language: TLanguage): Promise<any> {
  switch (language) {
    case "your-lang":
      return import("../locales/your-lang/translations.json");
    // ...
  }
}
```

### Quality checklist

Before submitting your contribution, please ensure the following:

- All translation keys exist in every language file.
@@ -222,6 +232,7 @@ Before submitting your contribution, please ensure the following:
- There are no missing or untranslated keys.

#### Pro tips

- When in doubt, refer to the English translations for context.
- Verify pluralization works with different numbers.
- Ensure dynamic values (e.g., `{name}`) are correctly interpolated.
````
||||
88
ENV_SETUP.md
88
ENV_SETUP.md
@@ -1,88 +0,0 @@
|
||||
# Environment Variables
|
||||
|
||||
Environment variables are distributed in various files. Please refer them carefully.
|
||||
|
||||
## {PROJECT_FOLDER}/.env
|
||||
|
||||
File is available in the project root folder
|
||||
|
||||
```
|
||||
# Database Settings
|
||||
POSTGRES_USER="plane"
|
||||
POSTGRES_PASSWORD="plane"
|
||||
POSTGRES_DB="plane"
|
||||
PGDATA="/var/lib/postgresql/data"
|
||||
# Redis Settings
|
||||
REDIS_HOST="plane-redis"
|
||||
REDIS_PORT="6379"
|
||||
# AWS Settings
|
||||
AWS_REGION=""
|
||||
AWS_ACCESS_KEY_ID="access-key"
|
||||
AWS_SECRET_ACCESS_KEY="secret-key"
|
||||
AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
|
||||
# Changing this requires change in the nginx.conf for uploads if using minio setup
|
||||
AWS_S3_BUCKET_NAME="uploads"
|
||||
# Maximum file upload limit
|
||||
FILE_SIZE_LIMIT=5242880
|
||||
# GPT settings
|
||||
OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
|
||||
OPENAI_API_KEY="sk-" # deprecated
|
||||
GPT_ENGINE="gpt-3.5-turbo" # deprecated
|
||||
# Settings related to Docker
|
||||
DOCKERIZED=1 # deprecated
|
||||
# set to 1 If using the pre-configured minio setup
|
||||
USE_MINIO=1
|
||||
# Nginx Configuration
|
||||
NGINX_PORT=80
|
||||
```
|
||||
|
||||
## {PROJECT_FOLDER}/apiserver/.env
|
||||
|
||||
```
|
||||
# Backend
|
||||
# Debug value for api server use it as 0 for production use
|
||||
DEBUG=0
|
||||
CORS_ALLOWED_ORIGINS="http://localhost"
|
||||
# Database Settings
|
||||
POSTGRES_USER="plane"
|
||||
POSTGRES_PASSWORD="plane"
|
||||
POSTGRES_HOST="plane-db"
|
||||
POSTGRES_DB="plane"
|
||||
POSTGRES_PORT=5432
|
||||
DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
|
||||
# Redis Settings
|
||||
REDIS_HOST="plane-redis"
|
||||
REDIS_PORT="6379"
|
||||
REDIS_URL="redis://${REDIS_HOST}:6379/"
|
||||
# AWS Settings
|
||||
AWS_REGION=""
|
||||
AWS_ACCESS_KEY_ID="access-key"
|
||||
AWS_SECRET_ACCESS_KEY="secret-key"
|
||||
AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
|
||||
# Changing this requires change in the nginx.conf for uploads if using minio setup
|
||||
AWS_S3_BUCKET_NAME="uploads"
|
||||
# Maximum file upload limit
|
||||
FILE_SIZE_LIMIT=5242880
|
||||
# Settings related to Docker
|
||||
DOCKERIZED=1 # deprecated
|
||||
# set to 1 If using the pre-configured minio setup
|
||||
USE_MINIO=1
|
||||
# Nginx Configuration
|
||||
NGINX_PORT=80
|
||||
# Email redirections and minio domain settings
|
||||
WEB_URL="http://localhost"
|
||||
# Gunicorn Workers
|
||||
GUNICORN_WORKERS=2
|
||||
# Base URLs
|
||||
ADMIN_BASE_URL=
|
||||
SPACE_BASE_URL=
|
||||
APP_BASE_URL=
|
||||
SECRET_KEY="gxoytl7dmnc1y37zahah820z5iq3iozu38cnfjtu3yaau9cd9z"
|
||||
```
|
||||
|
||||
## Updates
|
||||
|
||||
- The naming convention for containers and images has been updated.
|
||||
- The plane-worker image will no longer be maintained, as it has been merged with plane-backend.
|
||||
- The Tiptap pro-extension dependency has been removed, eliminating the need for Tiptap API keys.
|
||||
- The image name for Plane deployment has been changed to plane-space.
|
||||
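
With the default values in the apiserver `.env` above, the interpolated connection strings resolve as follows (a worked example, not additional settings):

```
DATABASE_URL=postgresql://plane:plane@plane-db:5432/plane
REDIS_URL=redis://plane-redis:6379/
```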

README.md (75 changed lines)
@@ -2,11 +2,10 @@
|
||||
|
||||
<p align="center">
|
||||
<a href="https://plane.so">
|
||||
<img src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_logo_.webp" alt="Plane Logo" width="70">
|
||||
<img src="https://media.docs.plane.so/logo/plane_github_readme.png" alt="Plane Logo" width="400">
|
||||
</a>
|
||||
</p>
|
||||
<h1 align="center"><b>Plane</b></h1>
|
||||
<p align="center"><b>Open-source project management that unlocks customer value</b></p>
|
||||
<p align="center"><b>Modern project management for all teams</b></p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://discord.com/invite/A92xrEGCge">
|
||||
@@ -25,14 +24,7 @@
|
||||
<p>
|
||||
<a href="https://app.plane.so/#gh-light-mode-only" target="_blank">
|
||||
<img
|
||||
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_screen.webp"
|
||||
alt="Plane Screens"
|
||||
width="100%"
|
||||
/>
|
||||
</a>
|
||||
<a href="https://app.plane.so/#gh-dark-mode-only" target="_blank">
|
||||
<img
|
||||
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_screens_dark_mode.webp"
|
||||
src="https://media.docs.plane.so/GitHub-readme/github-top.webp"
|
||||
alt="Plane Screens"
|
||||
width="100%"
|
||||
/>
|
||||
@@ -48,13 +40,13 @@ Meet [Plane](https://plane.so/), an open-source project management tool to track
|
||||
Getting started with Plane is simple. Choose the setup that works best for you:
|
||||
|
||||
- **Plane Cloud**
|
||||
Sign up for a free account on [Plane Cloud](https://app.plane.so)—it's the fastest way to get up and running without worrying about infrastructure.
|
||||
Sign up for a free account on [Plane Cloud](https://app.plane.so)—it's the fastest way to get up and running without worrying about infrastructure.
|
||||
|
||||
- **Self-host Plane**
|
||||
Prefer full control over your data and infrastructure? Install and run Plane on your own servers. Follow our detailed [deployment guides](https://developers.plane.so/self-hosting/overview) to get started.
|
||||
Prefer full control over your data and infrastructure? Install and run Plane on your own servers. Follow our detailed [deployment guides](https://developers.plane.so/self-hosting/overview) to get started.
|
||||
|
||||
| Installation methods | Docs link |
|
||||
| -------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------ |
|
||||
| Installation methods | Docs link |
|
||||
| -------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| Docker | [](https://developers.plane.so/self-hosting/methods/docker-compose) |
|
||||
| Kubernetes | [](https://developers.plane.so/self-hosting/methods/kubernetes) |
|
||||
|
||||
@@ -63,58 +55,58 @@ Prefer full control over your data and infrastructure? Install and run Plane on
|
||||
## 🌟 Features
|
||||
|
||||
- **Issues**
|
||||
Efficiently create and manage tasks with a robust rich text editor that supports file uploads. Enhance organization and tracking by adding sub-properties and referencing related issues.
|
||||
Efficiently create and manage tasks with a robust rich text editor that supports file uploads. Enhance organization and tracking by adding sub-properties and referencing related issues.
|
||||
|
||||
- **Cycles**
|
||||
Maintain your team’s momentum with Cycles. Track progress effortlessly using burn-down charts and other insightful tools.
|
||||
Maintain your team’s momentum with Cycles. Track progress effortlessly using burn-down charts and other insightful tools.
|
||||
|
||||
- **Modules**
|
||||
Simplify complex projects by dividing them into smaller, manageable modules.
|
||||
Simplify complex projects by dividing them into smaller, manageable modules.
|
||||
|
||||
- **Views**
|
||||
Customize your workflow by creating filters to display only the most relevant issues. Save and share these views with ease.
|
||||
Customize your workflow by creating filters to display only the most relevant issues. Save and share these views with ease.
|
||||
|
||||
- **Pages**
|
||||
Capture and organize ideas using Plane Pages, complete with AI capabilities and a rich text editor. Format text, insert images, add hyperlinks, or convert your notes into actionable items.
|
||||
Capture and organize ideas using Plane Pages, complete with AI capabilities and a rich text editor. Format text, insert images, add hyperlinks, or convert your notes into actionable items.
|
||||
|
||||
- **Analytics**
|
||||
Access real-time insights across all your Plane data. Visualize trends, remove blockers, and keep your projects moving forward.
|
||||
Access real-time insights across all your Plane data. Visualize trends, remove blockers, and keep your projects moving forward.
|
||||
|
||||
- **Drive** (_coming soon_): The drive helps you share documents, images, videos, or any other files that make sense to you or your team and align on the problem/solution.
|
||||
|
||||
|
||||
## 🛠️ Local development
|
||||
|
||||
See [CONTRIBUTING](./CONTRIBUTING.md)
|
||||
|
||||
## ⚙️ Built with
|
||||
|
||||
[](https://nextjs.org/)
|
||||
[](https://www.djangoproject.com/)
|
||||
[](https://nodejs.org/en)
|
||||
|
||||
## 📸 Screenshots
|
||||
|
||||
<p>
|
||||
<p>
|
||||
<a href="https://plane.so" target="_blank">
|
||||
<img
|
||||
src="https://ik.imagekit.io/w2okwbtu2/Issues_rNZjrGgFl.png?updatedAt=1709298765880"
|
||||
src="https://media.docs.plane.so/GitHub-readme/github-work-items.webp"
|
||||
alt="Plane Views"
|
||||
width="100%"
|
||||
/>
|
||||
</a>
|
||||
</p>
|
||||
<p>
|
||||
<a href="https://plane.so" target="_blank">
|
||||
<img
|
||||
src="https://ik.imagekit.io/w2okwbtu2/Cycles_jCDhqmTl9.png?updatedAt=1709298780697"
|
||||
width="100%"
|
||||
/>
|
||||
</a>
|
||||
</p>
|
||||
<p>
|
||||
<a href="https://plane.so" target="_blank">
|
||||
<img
|
||||
src="https://ik.imagekit.io/w2okwbtu2/Modules_PSCVsbSfI.png?updatedAt=1709298796783"
|
||||
src="https://media.docs.plane.so/GitHub-readme/github-cycles.webp"
|
||||
width="100%"
|
||||
/>
|
||||
</a>
|
||||
</p>
|
||||
<p>
|
||||
<a href="https://plane.so" target="_blank">
|
||||
<img
|
||||
src="https://media.docs.plane.so/GitHub-readme/github-modules.webp"
|
||||
alt="Plane Cycles and Modules"
|
||||
width="100%"
|
||||
/>
|
||||
@@ -123,7 +115,7 @@ See [CONTRIBUTING](./CONTRIBUTING.md)
|
||||
<p>
|
||||
<a href="https://plane.so" target="_blank">
|
||||
<img
|
||||
src="https://ik.imagekit.io/w2okwbtu2/Views_uxXsRatS4.png?updatedAt=1709298834522"
|
||||
src="https://media.docs.plane.so/GitHub-readme/github-views.webp"
|
||||
alt="Plane Analytics"
|
||||
width="100%"
|
||||
/>
|
||||
@@ -132,25 +124,16 @@ See [CONTRIBUTING](./CONTRIBUTING.md)
|
||||
<p>
|
||||
<a href="https://plane.so" target="_blank">
|
||||
<img
|
||||
src="https://ik.imagekit.io/w2okwbtu2/Analytics_0o22gLRtp.png?updatedAt=1709298834389"
|
||||
src="https://media.docs.plane.so/GitHub-readme/github-analytics.webp"
|
||||
alt="Plane Pages"
|
||||
width="100%"
|
||||
/>
|
||||
</a>
|
||||
</p>
|
||||
</p>
|
||||
<p>
|
||||
<a href="https://plane.so" target="_blank">
|
||||
<img
|
||||
src="https://ik.imagekit.io/w2okwbtu2/Drive_LlfeY4xn3.png?updatedAt=1709298837917"
|
||||
alt="Plane Command Menu"
|
||||
width="100%"
|
||||
/>
|
||||
</a>
|
||||
</p>
|
||||
</p>
|
||||
|
||||
## 📝 Documentation
|
||||
|
||||
Explore Plane's [product documentation](https://docs.plane.so/) and [developer documentation](https://developers.plane.so/) to learn about features, setup, and usage.
|
||||
|
||||
## ❤️ Community
|
||||
@@ -186,6 +169,6 @@ Please read [CONTRIBUTING.md](https://github.com/makeplane/plane/blob/master/CON
|
||||
<img src="https://contrib.rocks/image?repo=makeplane/plane" />
|
||||
</a>
|
||||
|
||||
|
||||
## License
|
||||
|
||||
This project is licensed under the [GNU Affero General Public License v3.0](https://github.com/makeplane/plane/blob/master/LICENSE.txt).
|
||||
|
||||

Deleted file (17 lines):

```dockerfile
@@ -1,17 +0,0 @@
FROM node:20-alpine
RUN apk add --no-cache libc6-compat
# Set working directory
WORKDIR /app

COPY . .

RUN yarn global add turbo
RUN yarn install

ENV NEXT_PUBLIC_ADMIN_BASE_PATH="/god-mode"

EXPOSE 3000

VOLUME [ "/app/node_modules", "/app/admin/node_modules" ]

CMD ["yarn", "dev", "--filter=admin"]
```
@@ -1,134 +0,0 @@
|
||||
"use client";
|
||||
import { FC } from "react";
|
||||
import { useForm } from "react-hook-form";
|
||||
import { Lightbulb } from "lucide-react";
|
||||
import { IFormattedInstanceConfiguration, TInstanceAIConfigurationKeys } from "@plane/types";
|
||||
import { Button, TOAST_TYPE, setToast } from "@plane/ui";
|
||||
// components
|
||||
import { ControllerInput, TControllerInputFormField } from "@/components/common";
|
||||
// hooks
|
||||
import { useInstance } from "@/hooks/store";
|
||||
|
||||
type IInstanceAIForm = {
|
||||
config: IFormattedInstanceConfiguration;
|
||||
};
|
||||
|
||||
type AIFormValues = Record<TInstanceAIConfigurationKeys, string>;
|
||||
|
||||
export const InstanceAIForm: FC<IInstanceAIForm> = (props) => {
|
||||
const { config } = props;
|
||||
// store
|
||||
const { updateInstanceConfigurations } = useInstance();
|
||||
// form data
|
||||
const {
|
||||
handleSubmit,
|
||||
control,
|
||||
formState: { errors, isSubmitting },
|
||||
} = useForm<AIFormValues>({
|
||||
defaultValues: {
|
||||
LLM_API_KEY: config["LLM_API_KEY"],
|
||||
LLM_MODEL: config["LLM_MODEL"],
|
||||
},
|
||||
});
|
||||
|
||||
const aiFormFields: TControllerInputFormField[] = [
|
||||
{
|
||||
key: "LLM_MODEL",
|
||||
type: "text",
|
||||
label: "LLM Model",
|
||||
description: (
|
||||
<>
|
||||
Choose an OpenAI engine.{" "}
|
||||
<a
|
||||
href="https://platform.openai.com/docs/models/overview"
|
||||
target="_blank"
|
||||
className="text-custom-primary-100 hover:underline"
|
||||
rel="noreferrer"
|
||||
>
|
||||
Learn more
|
||||
</a>
|
||||
</>
|
||||
),
|
||||
placeholder: "gpt-4o-mini",
|
||||
error: Boolean(errors.LLM_MODEL),
|
||||
required: false,
|
||||
},
|
||||
{
|
||||
key: "LLM_API_KEY",
|
||||
type: "password",
|
||||
label: "API key",
|
||||
description: (
|
||||
<>
|
||||
You will find your API key{" "}
|
||||
<a
|
||||
href="https://platform.openai.com/api-keys"
|
||||
target="_blank"
|
||||
className="text-custom-primary-100 hover:underline"
|
||||
rel="noreferrer"
|
||||
>
|
||||
here.
|
||||
</a>
|
||||
</>
|
||||
),
|
||||
placeholder: "sk-asddassdfasdefqsdfasd23das3dasdcasd",
|
||||
error: Boolean(errors.LLM_API_KEY),
|
||||
required: false,
|
||||
},
|
||||
];
|
||||
|
||||
const onSubmit = async (formData: AIFormValues) => {
|
||||
const payload: Partial<AIFormValues> = { ...formData };
|
||||
|
||||
await updateInstanceConfigurations(payload)
|
||||
.then(() =>
|
||||
setToast({
|
||||
type: TOAST_TYPE.SUCCESS,
|
||||
title: "Success",
|
||||
message: "AI Settings updated successfully",
|
||||
})
|
||||
)
|
||||
.catch((err) => console.error(err));
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="space-y-8">
|
||||
<div className="space-y-3">
|
||||
<div>
|
||||
<div className="pb-1 text-xl font-medium text-custom-text-100">OpenAI</div>
|
||||
<div className="text-sm font-normal text-custom-text-300">If you use ChatGPT, this is for you.</div>
|
||||
</div>
|
||||
<div className="grid-col grid w-full grid-cols-1 items-center justify-between gap-x-12 gap-y-8 lg:grid-cols-3">
|
||||
{aiFormFields.map((field) => (
|
||||
<ControllerInput
|
||||
key={field.key}
|
||||
control={control}
|
||||
type={field.type}
|
||||
name={field.key}
|
||||
label={field.label}
|
||||
description={field.description}
|
||||
placeholder={field.placeholder}
|
||||
error={field.error}
|
||||
required={field.required}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="space-y-4">
|
||||
<Button variant="primary" onClick={handleSubmit(onSubmit)} loading={isSubmitting}>
|
||||
{isSubmitting ? "Saving..." : "Save changes"}
|
||||
</Button>
|
||||
|
||||
<div className="relative inline-flex items-center gap-2 rounded border border-custom-primary-100/20 bg-custom-primary-100/10 px-4 py-2 text-xs text-custom-primary-200">
|
||||
<Lightbulb height="14" width="14" />
|
||||
<div>
|
||||
If you have a preferred AI models vendor, please get in{" "}
|
||||
<a className="underline font-medium" href="https://plane.so/contact">
|
||||
touch with us.
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -1,11 +0,0 @@
|
||||
import { ReactNode } from "react";
|
||||
import { Metadata } from "next";
|
||||
import { AdminLayout } from "@/layouts/admin-layout";
|
||||
|
||||
export const metadata: Metadata = {
|
||||
title: "Artificial Intelligence Settings - Plane Web",
|
||||
};
|
||||
|
||||
export default function AILayout({ children }: { children: ReactNode }) {
|
||||
return <AdminLayout>{children}</AdminLayout>;
|
||||
}
|
||||
@@ -1,227 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { FC, useState } from "react";
|
||||
import isEmpty from "lodash/isEmpty";
|
||||
import Link from "next/link";
|
||||
import { useForm } from "react-hook-form";
|
||||
// plane internal packages
|
||||
import { API_BASE_URL } from "@plane/constants";
|
||||
import { IFormattedInstanceConfiguration, TInstanceGithubAuthenticationConfigurationKeys } from "@plane/types";
|
||||
import { Button, TOAST_TYPE, getButtonStyling, setToast } from "@plane/ui";
|
||||
import { cn } from "@plane/utils";
|
||||
// components
|
||||
import {
|
||||
CodeBlock,
|
||||
ConfirmDiscardModal,
|
||||
ControllerInput,
|
||||
CopyField,
|
||||
TControllerInputFormField,
|
||||
TCopyField,
|
||||
} from "@/components/common";
|
||||
// hooks
|
||||
import { useInstance } from "@/hooks/store";
|
||||
|
||||
type Props = {
|
||||
config: IFormattedInstanceConfiguration;
|
||||
};
|
||||
|
||||
type GithubConfigFormValues = Record<TInstanceGithubAuthenticationConfigurationKeys, string>;
|
||||
|
||||
export const InstanceGithubConfigForm: FC<Props> = (props) => {
|
||||
const { config } = props;
|
||||
// states
|
||||
const [isDiscardChangesModalOpen, setIsDiscardChangesModalOpen] = useState(false);
|
||||
// store hooks
|
||||
const { updateInstanceConfigurations } = useInstance();
|
||||
// form data
|
||||
const {
|
||||
handleSubmit,
|
||||
control,
|
||||
reset,
|
||||
formState: { errors, isDirty, isSubmitting },
|
||||
} = useForm<GithubConfigFormValues>({
|
||||
defaultValues: {
|
||||
GITHUB_CLIENT_ID: config["GITHUB_CLIENT_ID"],
|
||||
GITHUB_CLIENT_SECRET: config["GITHUB_CLIENT_SECRET"],
|
||||
GITHUB_ORGANIZATION_ID: config["GITHUB_ORGANIZATION_ID"],
|
||||
},
|
||||
});
|
||||
|
||||
const originURL = !isEmpty(API_BASE_URL) ? API_BASE_URL : typeof window !== "undefined" ? window.location.origin : "";
|
||||
|
||||
const GITHUB_FORM_FIELDS: TControllerInputFormField[] = [
|
||||
{
|
||||
key: "GITHUB_CLIENT_ID",
|
||||
type: "text",
|
||||
label: "Client ID",
|
||||
description: (
|
||||
<>
|
||||
You will get this from your{" "}
|
||||
<a
|
||||
tabIndex={-1}
|
||||
href="https://github.com/settings/applications/new"
|
||||
target="_blank"
|
||||
className="text-custom-primary-100 hover:underline"
|
||||
rel="noreferrer"
|
||||
>
|
||||
GitHub OAuth application settings.
|
||||
</a>
|
||||
</>
|
||||
),
|
||||
placeholder: "70a44354520df8bd9bcd",
|
||||
error: Boolean(errors.GITHUB_CLIENT_ID),
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
key: "GITHUB_CLIENT_SECRET",
|
||||
type: "password",
|
||||
label: "Client secret",
|
||||
description: (
|
||||
<>
|
||||
Your client secret is also found in your{" "}
|
||||
<a
|
||||
tabIndex={-1}
|
||||
href="https://github.com/settings/applications/new"
|
||||
target="_blank"
|
||||
className="text-custom-primary-100 hover:underline"
|
||||
rel="noreferrer"
|
||||
>
|
||||
GitHub OAuth application settings.
|
||||
</a>
|
||||
</>
|
||||
),
|
||||
placeholder: "9b0050f94ec1b744e32ce79ea4ffacd40d4119cb",
|
||||
error: Boolean(errors.GITHUB_CLIENT_SECRET),
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
key: "GITHUB_ORGANIZATION_ID",
|
||||
type: "text",
|
||||
label: "Organization ID",
|
||||
description: <>The organization github ID.</>,
|
||||
placeholder: "123456789",
|
||||
error: Boolean(errors.GITHUB_ORGANIZATION_ID),
|
||||
required: false,
|
||||
},
|
||||
];
|
||||
|
||||
const GITHUB_SERVICE_FIELD: TCopyField[] = [
|
||||
{
|
||||
key: "Origin_URL",
|
||||
label: "Origin URL",
|
||||
url: originURL,
|
||||
description: (
|
||||
<>
|
||||
We will auto-generate this. Paste this into the <CodeBlock darkerShade>Authorized origin URL</CodeBlock> field{" "}
|
||||
<a
|
||||
tabIndex={-1}
|
||||
href="https://github.com/settings/applications/new"
|
||||
target="_blank"
|
||||
className="text-custom-primary-100 hover:underline"
|
||||
rel="noreferrer"
|
||||
>
|
||||
here.
|
||||
</a>
|
||||
</>
|
||||
),
|
||||
},
|
||||
{
|
||||
key: "Callback_URI",
|
||||
label: "Callback URI",
|
||||
url: `${originURL}/auth/github/callback/`,
|
||||
description: (
|
||||
<>
|
||||
We will auto-generate this. Paste this into your <CodeBlock darkerShade>Authorized Callback URI</CodeBlock>{" "}
|
||||
field{" "}
|
||||
<a
|
||||
tabIndex={-1}
|
||||
href="https://github.com/settings/applications/new"
|
||||
target="_blank"
|
||||
className="text-custom-primary-100 hover:underline"
|
||||
rel="noreferrer"
|
||||
>
|
||||
here.
|
||||
</a>
|
||||
</>
|
||||
),
|
||||
},
|
||||
];
|
||||
|
||||
const onSubmit = async (formData: GithubConfigFormValues) => {
|
||||
const payload: Partial<GithubConfigFormValues> = { ...formData };
|
||||
|
||||
await updateInstanceConfigurations(payload)
|
||||
.then((response = []) => {
|
||||
setToast({
|
||||
type: TOAST_TYPE.SUCCESS,
|
||||
title: "Done!",
|
||||
message: "Your GitHub authentication is configured. You should test it now.",
|
||||
});
|
||||
reset({
|
||||
GITHUB_CLIENT_ID: response.find((item) => item.key === "GITHUB_CLIENT_ID")?.value,
|
||||
GITHUB_CLIENT_SECRET: response.find((item) => item.key === "GITHUB_CLIENT_SECRET")?.value,
|
||||
GITHUB_ORGANIZATION_ID: response.find((item) => item.key === "GITHUB_ORGANIZATION_ID")?.value,
|
||||
});
|
||||
})
|
||||
.catch((err) => console.error(err));
|
||||
};
|
||||
|
||||
const handleGoBack = (e: React.MouseEvent<HTMLAnchorElement, MouseEvent>) => {
|
||||
if (isDirty) {
|
||||
e.preventDefault();
|
||||
setIsDiscardChangesModalOpen(true);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
<ConfirmDiscardModal
|
||||
isOpen={isDiscardChangesModalOpen}
|
||||
onDiscardHref="/authentication"
|
||||
handleClose={() => setIsDiscardChangesModalOpen(false)}
|
||||
/>
|
||||
<div className="flex flex-col gap-8">
|
||||
<div className="grid grid-cols-2 gap-x-12 gap-y-8 w-full">
|
||||
<div className="flex flex-col gap-y-4 col-span-2 md:col-span-1 pt-1">
|
||||
<div className="pt-2.5 text-xl font-medium">GitHub-provided details for Plane</div>
|
||||
{GITHUB_FORM_FIELDS.map((field) => (
|
||||
<ControllerInput
|
||||
key={field.key}
|
||||
control={control}
|
||||
type={field.type}
|
||||
name={field.key}
|
||||
label={field.label}
|
||||
description={field.description}
|
||||
placeholder={field.placeholder}
|
||||
error={field.error}
|
||||
required={field.required}
|
||||
/>
|
||||
))}
|
||||
<div className="flex flex-col gap-1 pt-4">
|
||||
<div className="flex items-center gap-4">
|
||||
<Button variant="primary" onClick={handleSubmit(onSubmit)} loading={isSubmitting} disabled={!isDirty}>
|
||||
{isSubmitting ? "Saving..." : "Save changes"}
|
||||
</Button>
|
||||
<Link
|
||||
href="/authentication"
|
||||
className={cn(getButtonStyling("neutral-primary", "md"), "font-medium")}
|
||||
onClick={handleGoBack}
|
||||
>
|
||||
Go back
|
||||
</Link>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="col-span-2 md:col-span-1">
|
||||
<div className="flex flex-col gap-y-4 px-6 pt-1.5 pb-4 bg-custom-background-80/60 rounded-lg">
|
||||
<div className="pt-2 text-xl font-medium">Plane-provided details for GitHub</div>
|
||||
{GITHUB_SERVICE_FIELD.map((field) => (
|
||||
<CopyField key={field.key} label={field.label} url={field.url} description={field.description} />
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
};
|
||||
@@ -1,114 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { useState } from "react";
|
||||
import { observer } from "mobx-react";
|
||||
import Image from "next/image";
|
||||
import { useTheme } from "next-themes";
|
||||
import useSWR from "swr";
|
||||
// plane internal packages
|
||||
import { Loader, ToggleSwitch, setPromiseToast } from "@plane/ui";
|
||||
import { resolveGeneralTheme } from "@plane/utils";
|
||||
// components
|
||||
import { AuthenticationMethodCard } from "@/components/authentication";
|
||||
import { PageHeader } from "@/components/common";
|
||||
// hooks
|
||||
import { useInstance } from "@/hooks/store";
|
||||
// icons
|
||||
import githubLightModeImage from "@/public/logos/github-black.png";
|
||||
import githubDarkModeImage from "@/public/logos/github-white.png";
|
||||
// local components
|
||||
import { InstanceGithubConfigForm } from "./form";
|
||||
|
||||
const InstanceGithubAuthenticationPage = observer(() => {
|
||||
// store
|
||||
const { fetchInstanceConfigurations, formattedConfig, updateInstanceConfigurations } = useInstance();
|
||||
// state
|
||||
const [isSubmitting, setIsSubmitting] = useState<boolean>(false);
|
||||
// theme
|
||||
const { resolvedTheme } = useTheme();
|
||||
// config
|
||||
const enableGithubConfig = formattedConfig?.IS_GITHUB_ENABLED ?? "";
|
||||
|
||||
useSWR("INSTANCE_CONFIGURATIONS", () => fetchInstanceConfigurations());
|
||||
|
||||
const updateConfig = async (key: "IS_GITHUB_ENABLED", value: string) => {
|
||||
setIsSubmitting(true);
|
||||
|
||||
const payload = {
|
||||
[key]: value,
|
||||
};
|
||||
|
||||
const updateConfigPromise = updateInstanceConfigurations(payload);
|
||||
|
||||
setPromiseToast(updateConfigPromise, {
|
||||
loading: "Saving Configuration...",
|
||||
success: {
|
||||
title: "Configuration saved",
|
||||
message: () => `GitHub authentication is now ${value ? "active" : "disabled"}.`,
|
||||
},
|
||||
error: {
|
||||
title: "Error",
|
||||
message: () => "Failed to save configuration",
|
||||
},
|
||||
});
|
||||
|
||||
await updateConfigPromise
|
||||
.then(() => {
|
||||
setIsSubmitting(false);
|
||||
})
|
||||
.catch((err) => {
|
||||
console.error(err);
|
||||
setIsSubmitting(false);
|
||||
});
|
||||
};
|
||||
return (
|
||||
<>
|
||||
<PageHeader title="GitHub Authentication - Plane Web" />
|
||||
<div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
|
||||
<div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
|
||||
<AuthenticationMethodCard
|
||||
name="GitHub"
|
||||
description="Allow members to login or sign up to plane with their GitHub accounts."
|
||||
icon={
|
||||
<Image
|
||||
src={resolveGeneralTheme(resolvedTheme) === "dark" ? githubDarkModeImage : githubLightModeImage}
|
||||
height={24}
|
||||
width={24}
|
||||
alt="GitHub Logo"
|
||||
/>
|
||||
}
|
||||
config={
|
||||
<ToggleSwitch
|
||||
value={Boolean(parseInt(enableGithubConfig))}
|
||||
onChange={() => {
|
||||
Boolean(parseInt(enableGithubConfig)) === true
|
||||
? updateConfig("IS_GITHUB_ENABLED", "0")
|
||||
: updateConfig("IS_GITHUB_ENABLED", "1");
|
||||
}}
|
||||
size="sm"
|
||||
disabled={isSubmitting || !formattedConfig}
|
||||
/>
|
||||
}
|
||||
disabled={isSubmitting || !formattedConfig}
|
||||
withBorder={false}
|
||||
/>
|
||||
</div>
|
||||
<div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
|
||||
{formattedConfig ? (
|
||||
<InstanceGithubConfigForm config={formattedConfig} />
|
||||
) : (
|
||||
<Loader className="space-y-8">
|
||||
<Loader.Item height="50px" width="25%" />
|
||||
<Loader.Item height="50px" />
|
||||
<Loader.Item height="50px" />
|
||||
<Loader.Item height="50px" />
|
||||
<Loader.Item height="50px" width="50%" />
|
||||
</Loader>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
});
|
||||
|
||||
export default InstanceGithubAuthenticationPage;
|
||||
@@ -1,212 +0,0 @@
|
||||
import { FC, useState } from "react";
|
||||
import isEmpty from "lodash/isEmpty";
|
||||
import Link from "next/link";
|
||||
import { useForm } from "react-hook-form";
|
||||
// plane internal packages
|
||||
import { API_BASE_URL } from "@plane/constants";
|
||||
import { IFormattedInstanceConfiguration, TInstanceGitlabAuthenticationConfigurationKeys } from "@plane/types";
|
||||
import { Button, TOAST_TYPE, getButtonStyling, setToast } from "@plane/ui";
|
||||
import { cn } from "@plane/utils";
|
||||
// components
|
||||
import {
|
||||
CodeBlock,
|
||||
ConfirmDiscardModal,
|
||||
ControllerInput,
|
||||
CopyField,
|
||||
TControllerInputFormField,
|
||||
TCopyField,
|
||||
} from "@/components/common";
|
||||
// hooks
|
||||
import { useInstance } from "@/hooks/store";
|
||||
|
||||
type Props = {
|
||||
config: IFormattedInstanceConfiguration;
|
||||
};
|
||||
|
||||
type GitlabConfigFormValues = Record<TInstanceGitlabAuthenticationConfigurationKeys, string>;
|
||||
|
||||
export const InstanceGitlabConfigForm: FC<Props> = (props) => {
|
||||
const { config } = props;
|
||||
// states
|
||||
const [isDiscardChangesModalOpen, setIsDiscardChangesModalOpen] = useState(false);
|
||||
// store hooks
|
||||
const { updateInstanceConfigurations } = useInstance();
|
||||
// form data
|
||||
const {
|
||||
handleSubmit,
|
||||
control,
|
||||
reset,
|
||||
formState: { errors, isDirty, isSubmitting },
|
||||
} = useForm<GitlabConfigFormValues>({
|
||||
defaultValues: {
|
||||
GITLAB_HOST: config["GITLAB_HOST"],
|
||||
GITLAB_CLIENT_ID: config["GITLAB_CLIENT_ID"],
|
||||
GITLAB_CLIENT_SECRET: config["GITLAB_CLIENT_SECRET"],
|
||||
},
|
||||
});
|
||||
|
||||
const originURL = !isEmpty(API_BASE_URL) ? API_BASE_URL : typeof window !== "undefined" ? window.location.origin : "";
|
||||
|
||||
const GITLAB_FORM_FIELDS: TControllerInputFormField[] = [
|
||||
{
|
||||
key: "GITLAB_HOST",
|
||||
type: "text",
|
||||
label: "Host",
|
||||
description: (
|
||||
<>
|
||||
This is either https://gitlab.com or the <CodeBlock>domain.tld</CodeBlock> where you host GitLab.
|
||||
</>
|
||||
),
|
||||
placeholder: "https://gitlab.com",
|
||||
error: Boolean(errors.GITLAB_HOST),
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
key: "GITLAB_CLIENT_ID",
|
||||
type: "text",
|
||||
label: "Application ID",
|
||||
description: (
|
||||
<>
|
||||
Get this from your{" "}
|
||||
<a
|
||||
tabIndex={-1}
|
||||
href="https://docs.gitlab.com/ee/integration/oauth_provider.html"
|
||||
target="_blank"
|
||||
className="text-custom-primary-100 hover:underline"
|
||||
rel="noreferrer"
|
||||
>
|
||||
GitLab OAuth application settings
|
||||
</a>
|
||||
.
|
||||
</>
|
||||
),
|
||||
placeholder: "c2ef2e7fc4e9d15aa7630f5637d59e8e4a27ff01dceebdb26b0d267b9adcf3c3",
|
||||
error: Boolean(errors.GITLAB_CLIENT_ID),
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
key: "GITLAB_CLIENT_SECRET",
|
||||
type: "password",
|
||||
label: "Secret",
|
||||
description: (
|
||||
<>
|
||||
The client secret is also found in your{" "}
|
||||
<a
|
||||
tabIndex={-1}
|
||||
href="https://docs.gitlab.com/ee/integration/oauth_provider.html"
|
||||
target="_blank"
|
||||
className="text-custom-primary-100 hover:underline"
|
||||
rel="noreferrer"
|
||||
>
|
||||
GitLab OAuth application settings
|
||||
</a>
|
||||
.
|
||||
</>
|
||||
),
|
||||
placeholder: "gloas-f79cfa9a03c97f6ffab303177a5a6778a53c61e3914ba093412f68a9298a1b28",
|
||||
error: Boolean(errors.GITLAB_CLIENT_SECRET),
|
||||
required: true,
|
||||
},
|
||||
];
|
||||
|
||||
const GITLAB_SERVICE_FIELD: TCopyField[] = [
|
||||
{
|
||||
key: "Callback_URL",
|
||||
label: "Callback URL",
|
||||
url: `${originURL}/auth/gitlab/callback/`,
|
||||
description: (
|
||||
<>
|
||||
We will auto-generate this. Paste this into the <CodeBlock darkerShade>Redirect URI</CodeBlock> field of your{" "}
|
||||
<a
|
||||
tabIndex={-1}
|
||||
href="https://docs.gitlab.com/ee/integration/oauth_provider.html"
|
||||
target="_blank"
|
||||
className="text-custom-primary-100 hover:underline"
|
||||
rel="noreferrer"
|
||||
>
|
||||
GitLab OAuth application
|
||||
</a>
|
||||
.
|
||||
</>
|
||||
),
|
||||
},
|
||||
];
|
||||
|
||||
const onSubmit = async (formData: GitlabConfigFormValues) => {
|
||||
const payload: Partial<GitlabConfigFormValues> = { ...formData };
|
||||
|
||||
await updateInstanceConfigurations(payload)
|
||||
.then((response = []) => {
|
||||
setToast({
|
||||
type: TOAST_TYPE.SUCCESS,
|
||||
title: "Done!",
|
||||
message: "Your GitLab authentication is configured. You should test it now.",
|
||||
});
|
||||
reset({
|
||||
GITLAB_HOST: response.find((item) => item.key === "GITLAB_HOST")?.value,
|
||||
GITLAB_CLIENT_ID: response.find((item) => item.key === "GITLAB_CLIENT_ID")?.value,
|
||||
GITLAB_CLIENT_SECRET: response.find((item) => item.key === "GITLAB_CLIENT_SECRET")?.value,
|
||||
});
|
||||
})
|
||||
.catch((err) => console.error(err));
|
||||
};
|
||||
|
||||
const handleGoBack = (e: React.MouseEvent<HTMLAnchorElement, MouseEvent>) => {
|
||||
if (isDirty) {
|
||||
e.preventDefault();
|
||||
setIsDiscardChangesModalOpen(true);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
<ConfirmDiscardModal
|
||||
isOpen={isDiscardChangesModalOpen}
|
||||
onDiscardHref="/authentication"
|
||||
handleClose={() => setIsDiscardChangesModalOpen(false)}
|
||||
/>
|
||||
<div className="flex flex-col gap-8">
|
||||
<div className="grid grid-cols-2 gap-x-12 gap-y-8 w-full">
|
||||
<div className="flex flex-col gap-y-4 col-span-2 md:col-span-1 pt-1">
|
||||
<div className="pt-2.5 text-xl font-medium">GitLab-provided details for Plane</div>
|
||||
{GITLAB_FORM_FIELDS.map((field) => (
|
||||
<ControllerInput
|
||||
key={field.key}
|
||||
control={control}
|
||||
type={field.type}
|
||||
name={field.key}
|
||||
label={field.label}
|
||||
description={field.description}
|
||||
placeholder={field.placeholder}
|
||||
error={field.error}
|
||||
required={field.required}
|
||||
/>
|
||||
))}
|
||||
<div className="flex flex-col gap-1 pt-4">
|
||||
<div className="flex items-center gap-4">
|
||||
<Button variant="primary" onClick={handleSubmit(onSubmit)} loading={isSubmitting} disabled={!isDirty}>
|
||||
{isSubmitting ? "Saving..." : "Save changes"}
|
||||
</Button>
|
||||
<Link
|
||||
href="/authentication"
|
||||
className={cn(getButtonStyling("neutral-primary", "md"), "font-medium")}
|
||||
onClick={handleGoBack}
|
||||
>
|
||||
Go back
|
||||
</Link>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="col-span-2 md:col-span-1">
|
||||
<div className="flex flex-col gap-y-4 px-6 pt-1.5 pb-4 bg-custom-background-80/60 rounded-lg">
|
||||
<div className="pt-2 text-xl font-medium">Plane-provided details for GitLab</div>
|
||||
{GITLAB_SERVICE_FIELD.map((field) => (
|
||||
<CopyField key={field.key} label={field.label} url={field.url} description={field.description} />
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
};
|
||||
@@ -1,101 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { useState } from "react";
|
||||
import { observer } from "mobx-react";
|
||||
import Image from "next/image";
|
||||
import useSWR from "swr";
|
||||
import { Loader, ToggleSwitch, setPromiseToast } from "@plane/ui";
|
||||
// components
|
||||
import { AuthenticationMethodCard } from "@/components/authentication";
|
||||
import { PageHeader } from "@/components/common";
|
||||
// hooks
|
||||
import { useInstance } from "@/hooks/store";
|
||||
// icons
|
||||
import GitlabLogo from "@/public/logos/gitlab-logo.svg";
|
||||
// local components
|
||||
import { InstanceGitlabConfigForm } from "./form";
|
||||
|
||||
const InstanceGitlabAuthenticationPage = observer(() => {
|
||||
// store
|
||||
const { fetchInstanceConfigurations, formattedConfig, updateInstanceConfigurations } = useInstance();
|
||||
// state
|
||||
const [isSubmitting, setIsSubmitting] = useState<boolean>(false);
|
||||
// config
|
||||
const enableGitlabConfig = formattedConfig?.IS_GITLAB_ENABLED ?? "";
|
||||
|
||||
useSWR("INSTANCE_CONFIGURATIONS", () => fetchInstanceConfigurations());
|
||||
|
||||
const updateConfig = async (key: "IS_GITLAB_ENABLED", value: string) => {
|
||||
setIsSubmitting(true);
|
||||
|
||||
const payload = {
|
||||
[key]: value,
|
||||
};
|
||||
|
||||
const updateConfigPromise = updateInstanceConfigurations(payload);
|
||||
|
||||
setPromiseToast(updateConfigPromise, {
|
||||
loading: "Saving Configuration...",
|
||||
success: {
|
||||
title: "Configuration saved",
|
||||
message: () => `GitLab authentication is now ${value ? "active" : "disabled"}.`,
|
||||
},
|
||||
error: {
|
||||
title: "Error",
|
||||
message: () => "Failed to save configuration",
|
||||
},
|
||||
});
|
||||
|
||||
await updateConfigPromise
|
||||
.then(() => {
|
||||
setIsSubmitting(false);
|
||||
})
|
||||
.catch((err) => {
|
||||
console.error(err);
|
||||
setIsSubmitting(false);
|
||||
});
|
||||
};
|
||||
return (
|
||||
<>
|
||||
<PageHeader title="GitLab Authentication - Plane Web" />
|
||||
<div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
|
||||
<div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
|
||||
<AuthenticationMethodCard
|
||||
name="GitLab"
|
||||
description="Allow members to login or sign up to plane with their GitLab accounts."
|
||||
icon={<Image src={GitlabLogo} height={24} width={24} alt="GitLab Logo" />}
|
||||
config={
|
||||
<ToggleSwitch
|
||||
value={Boolean(parseInt(enableGitlabConfig))}
|
||||
onChange={() => {
|
||||
Boolean(parseInt(enableGitlabConfig)) === true
|
||||
? updateConfig("IS_GITLAB_ENABLED", "0")
|
||||
: updateConfig("IS_GITLAB_ENABLED", "1");
|
||||
}}
|
||||
size="sm"
|
||||
disabled={isSubmitting || !formattedConfig}
|
||||
/>
|
||||
}
|
||||
disabled={isSubmitting || !formattedConfig}
|
||||
withBorder={false}
|
||||
/>
|
||||
</div>
|
||||
<div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
|
||||
{formattedConfig ? (
|
||||
<InstanceGitlabConfigForm config={formattedConfig} />
|
||||
) : (
|
||||
<Loader className="space-y-8">
|
||||
<Loader.Item height="50px" width="25%" />
|
||||
<Loader.Item height="50px" />
|
||||
<Loader.Item height="50px" />
|
||||
<Loader.Item height="50px" />
|
||||
<Loader.Item height="50px" width="50%" />
|
||||
</Loader>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
});
|
||||
|
||||
export default InstanceGitlabAuthenticationPage;
|
||||
@@ -1,214 +0,0 @@
|
||||
"use client";
|
||||
import { FC, useState } from "react";
|
||||
import isEmpty from "lodash/isEmpty";
|
||||
import Link from "next/link";
|
||||
import { useForm } from "react-hook-form";
|
||||
// plane internal packages
|
||||
import { API_BASE_URL } from "@plane/constants";
|
||||
import { IFormattedInstanceConfiguration, TInstanceGoogleAuthenticationConfigurationKeys } from "@plane/types";
|
||||
import { Button, TOAST_TYPE, getButtonStyling, setToast } from "@plane/ui";
|
||||
import { cn } from "@plane/utils";
|
||||
// components
|
||||
import {
|
||||
CodeBlock,
|
||||
ConfirmDiscardModal,
|
||||
ControllerInput,
|
||||
CopyField,
|
||||
TControllerInputFormField,
|
||||
TCopyField,
|
||||
} from "@/components/common";
|
||||
// hooks
|
||||
import { useInstance } from "@/hooks/store";
|
||||
|
||||
type Props = {
|
||||
config: IFormattedInstanceConfiguration;
|
||||
};
|
||||
|
||||
type GoogleConfigFormValues = Record<TInstanceGoogleAuthenticationConfigurationKeys, string>;
|
||||
|
||||
export const InstanceGoogleConfigForm: FC<Props> = (props) => {
|
||||
const { config } = props;
|
||||
// states
|
||||
const [isDiscardChangesModalOpen, setIsDiscardChangesModalOpen] = useState(false);
|
||||
// store hooks
|
||||
const { updateInstanceConfigurations } = useInstance();
|
||||
// form data
|
||||
const {
|
||||
handleSubmit,
|
||||
control,
|
||||
reset,
|
||||
formState: { errors, isDirty, isSubmitting },
|
||||
} = useForm<GoogleConfigFormValues>({
|
||||
defaultValues: {
|
||||
GOOGLE_CLIENT_ID: config["GOOGLE_CLIENT_ID"],
|
||||
GOOGLE_CLIENT_SECRET: config["GOOGLE_CLIENT_SECRET"],
|
||||
},
|
||||
});
|
||||
|
||||
const originURL = !isEmpty(API_BASE_URL) ? API_BASE_URL : typeof window !== "undefined" ? window.location.origin : "";
|
||||
|
||||
const GOOGLE_FORM_FIELDS: TControllerInputFormField[] = [
|
||||
{
|
||||
key: "GOOGLE_CLIENT_ID",
|
||||
type: "text",
|
||||
label: "Client ID",
|
||||
description: (
|
||||
<>
|
||||
Your client ID lives in your Google API Console.{" "}
|
||||
<a
|
||||
tabIndex={-1}
|
||||
href="https://developers.google.com/identity/protocols/oauth2/javascript-implicit-flow#creatingcred"
|
||||
target="_blank"
|
||||
className="text-custom-primary-100 hover:underline"
|
||||
rel="noreferrer"
|
||||
>
|
||||
Learn more
|
||||
</a>
|
||||
</>
|
||||
),
|
||||
placeholder: "840195096245-0p2tstej9j5nc4l8o1ah2dqondscqc1g.apps.googleusercontent.com",
|
||||
error: Boolean(errors.GOOGLE_CLIENT_ID),
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
key: "GOOGLE_CLIENT_SECRET",
|
||||
type: "password",
|
||||
label: "Client secret",
|
||||
description: (
|
||||
<>
|
||||
Your client secret should also be in your Google API Console.{" "}
|
||||
<a
|
||||
tabIndex={-1}
|
||||
href="https://developers.google.com/identity/oauth2/web/guides/get-google-api-clientid"
|
||||
target="_blank"
|
||||
className="text-custom-primary-100 hover:underline"
|
||||
rel="noreferrer"
|
||||
>
|
||||
Learn more
|
||||
</a>
|
||||
</>
|
||||
),
|
||||
placeholder: "GOCShX-ADp4cI0kPqav1gGCBg5bE02E",
|
||||
error: Boolean(errors.GOOGLE_CLIENT_SECRET),
|
||||
required: true,
|
||||
},
|
||||
];
|
||||
|
||||
const GOOGLE_SERVICE_DETAILS: TCopyField[] = [
|
||||
{
|
||||
key: "Origin_URL",
|
||||
label: "Origin URL",
|
||||
url: originURL,
|
||||
description: (
|
||||
<p>
|
||||
We will auto-generate this. Paste this into your{" "}
|
||||
<CodeBlock darkerShade>Authorized JavaScript origins</CodeBlock> field. For this OAuth client{" "}
|
||||
<a
|
||||
href="https://console.cloud.google.com/apis/credentials/oauthclient"
|
||||
target="_blank"
|
||||
className="text-custom-primary-100 hover:underline"
|
||||
rel="noreferrer"
|
||||
>
|
||||
here.
|
||||
</a>
|
||||
</p>
|
||||
),
|
||||
},
|
||||
{
|
||||
key: "Callback_URI",
|
||||
label: "Callback URI",
|
||||
url: `${originURL}/auth/google/callback/`,
|
||||
description: (
|
||||
<p>
|
||||
We will auto-generate this. Paste this into your <CodeBlock darkerShade>Authorized Redirect URI</CodeBlock>{" "}
|
||||
field. For this OAuth client{" "}
|
||||
<a
|
||||
href="https://console.cloud.google.com/apis/credentials/oauthclient"
|
||||
target="_blank"
|
||||
className="text-custom-primary-100 hover:underline"
|
||||
rel="noreferrer"
|
||||
>
|
||||
here.
|
||||
</a>
|
||||
</p>
|
||||
),
|
||||
},
|
||||
];
|
||||
|
||||
const onSubmit = async (formData: GoogleConfigFormValues) => {
|
||||
const payload: Partial<GoogleConfigFormValues> = { ...formData };
|
||||
|
||||
await updateInstanceConfigurations(payload)
|
||||
.then((response = []) => {
|
||||
setToast({
|
||||
type: TOAST_TYPE.SUCCESS,
|
||||
title: "Done!",
|
||||
message: "Your Google authentication is configured. You should test it now.",
|
||||
});
|
||||
reset({
|
||||
GOOGLE_CLIENT_ID: response.find((item) => item.key === "GOOGLE_CLIENT_ID")?.value,
|
||||
GOOGLE_CLIENT_SECRET: response.find((item) => item.key === "GOOGLE_CLIENT_SECRET")?.value,
|
||||
});
|
||||
})
|
||||
.catch((err) => console.error(err));
|
||||
};
|
||||
|
||||
const handleGoBack = (e: React.MouseEvent<HTMLAnchorElement, MouseEvent>) => {
|
||||
if (isDirty) {
|
||||
e.preventDefault();
|
||||
setIsDiscardChangesModalOpen(true);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
<ConfirmDiscardModal
|
||||
isOpen={isDiscardChangesModalOpen}
|
||||
onDiscardHref="/authentication"
|
||||
handleClose={() => setIsDiscardChangesModalOpen(false)}
|
||||
/>
|
||||
<div className="flex flex-col gap-8">
|
||||
<div className="grid grid-cols-2 gap-x-12 gap-y-8 w-full">
|
||||
<div className="flex flex-col gap-y-4 col-span-2 md:col-span-1 pt-1">
|
||||
<div className="pt-2.5 text-xl font-medium">Google-provided details for Plane</div>
|
||||
{GOOGLE_FORM_FIELDS.map((field) => (
|
||||
<ControllerInput
|
||||
key={field.key}
|
||||
control={control}
|
||||
type={field.type}
|
||||
name={field.key}
|
||||
label={field.label}
|
||||
description={field.description}
|
||||
placeholder={field.placeholder}
|
||||
error={field.error}
|
||||
required={field.required}
|
||||
/>
|
||||
))}
|
||||
<div className="flex flex-col gap-1 pt-4">
|
||||
<div className="flex items-center gap-4">
|
||||
<Button variant="primary" onClick={handleSubmit(onSubmit)} loading={isSubmitting} disabled={!isDirty}>
|
||||
{isSubmitting ? "Saving..." : "Save changes"}
|
||||
</Button>
|
||||
<Link
|
||||
href="/authentication"
|
||||
className={cn(getButtonStyling("neutral-primary", "md"), "font-medium")}
|
||||
onClick={handleGoBack}
|
||||
>
|
||||
Go back
|
||||
</Link>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="col-span-2 md:col-span-1">
|
||||
<div className="flex flex-col gap-y-4 px-6 pt-1.5 pb-4 bg-custom-background-80/60 rounded-lg">
|
||||
<div className="pt-2 text-xl font-medium">Plane-provided details for Google</div>
|
||||
{GOOGLE_SERVICE_DETAILS.map((field) => (
|
||||
<CopyField key={field.key} label={field.label} url={field.url} description={field.description} />
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
};
|
||||
@@ -1,102 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { useState } from "react";
|
||||
import { observer } from "mobx-react";
|
||||
import Image from "next/image";
|
||||
import useSWR from "swr";
|
||||
import { Loader, ToggleSwitch, setPromiseToast } from "@plane/ui";
|
||||
// components
|
||||
import { AuthenticationMethodCard } from "@/components/authentication";
|
||||
import { PageHeader } from "@/components/common";
|
||||
// hooks
|
||||
import { useInstance } from "@/hooks/store";
|
||||
// icons
|
||||
import GoogleLogo from "@/public/logos/google-logo.svg";
|
||||
// local components
|
||||
import { InstanceGoogleConfigForm } from "./form";
|
||||
|
||||
const InstanceGoogleAuthenticationPage = observer(() => {
|
||||
// store
|
||||
const { fetchInstanceConfigurations, formattedConfig, updateInstanceConfigurations } = useInstance();
|
||||
// state
|
||||
const [isSubmitting, setIsSubmitting] = useState<boolean>(false);
|
||||
// config
|
||||
const enableGoogleConfig = formattedConfig?.IS_GOOGLE_ENABLED ?? "";
|
||||
|
||||
useSWR("INSTANCE_CONFIGURATIONS", () => fetchInstanceConfigurations());
|
||||
|
||||
const updateConfig = async (key: "IS_GOOGLE_ENABLED", value: string) => {
|
||||
setIsSubmitting(true);
|
||||
|
||||
const payload = {
|
||||
[key]: value,
|
||||
};
|
||||
|
||||
const updateConfigPromise = updateInstanceConfigurations(payload);
|
||||
|
||||
setPromiseToast(updateConfigPromise, {
|
||||
loading: "Saving Configuration...",
|
||||
success: {
|
||||
title: "Configuration saved",
|
||||
message: () => `Google authentication is now ${value ? "active" : "disabled"}.`,
|
||||
},
|
||||
error: {
|
||||
title: "Error",
|
||||
message: () => "Failed to save configuration",
|
||||
},
|
||||
});
|
||||
|
||||
await updateConfigPromise
|
||||
.then(() => {
|
||||
setIsSubmitting(false);
|
||||
})
|
||||
.catch((err) => {
|
||||
console.error(err);
|
||||
setIsSubmitting(false);
|
||||
});
|
||||
};
|
||||
return (
|
||||
<>
|
||||
<PageHeader title="Google Authentication - Plane Web" />
|
||||
<div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
|
||||
<div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
|
||||
<AuthenticationMethodCard
|
||||
name="Google"
|
||||
description="Allow members to login or sign up to plane with their Google
|
||||
accounts."
|
||||
icon={<Image src={GoogleLogo} height={24} width={24} alt="Google Logo" />}
|
||||
config={
|
||||
<ToggleSwitch
|
||||
value={Boolean(parseInt(enableGoogleConfig))}
|
||||
onChange={() => {
|
||||
Boolean(parseInt(enableGoogleConfig)) === true
|
||||
? updateConfig("IS_GOOGLE_ENABLED", "0")
|
||||
: updateConfig("IS_GOOGLE_ENABLED", "1");
|
||||
}}
|
||||
size="sm"
|
||||
disabled={isSubmitting || !formattedConfig}
|
||||
/>
|
||||
}
|
||||
disabled={isSubmitting || !formattedConfig}
|
||||
withBorder={false}
|
||||
/>
|
||||
</div>
|
||||
<div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
|
||||
{formattedConfig ? (
|
||||
<InstanceGoogleConfigForm config={formattedConfig} />
|
||||
) : (
|
||||
<Loader className="space-y-8">
|
||||
<Loader.Item height="50px" width="25%" />
|
||||
<Loader.Item height="50px" />
|
||||
<Loader.Item height="50px" />
|
||||
<Loader.Item height="50px" />
|
||||
<Loader.Item height="50px" width="50%" />
|
||||
</Loader>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
});
|
||||
|
||||
export default InstanceGoogleAuthenticationPage;
|
||||
@@ -1,11 +0,0 @@
|
||||
import { ReactNode } from "react";
|
||||
import { Metadata } from "next";
|
||||
import { AdminLayout } from "@/layouts/admin-layout";
|
||||
|
||||
export const metadata: Metadata = {
|
||||
title: "Authentication Settings - Plane Web",
|
||||
};
|
||||
|
||||
export default function AuthenticationLayout({ children }: { children: ReactNode }) {
|
||||
return <AdminLayout>{children}</AdminLayout>;
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
import { ReactNode } from "react";
|
||||
import { Metadata } from "next";
|
||||
import { AdminLayout } from "@/layouts/admin-layout";
|
||||
|
||||
interface EmailLayoutProps {
|
||||
children: ReactNode;
|
||||
}
|
||||
|
||||
export const metadata: Metadata = {
|
||||
title: "Email Settings - Plane Web",
|
||||
};
|
||||
|
||||
export default function EmailLayout({ children }: EmailLayoutProps) {
|
||||
return <AdminLayout>{children}</AdminLayout>;
|
||||
}
|
||||
@@ -1,48 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { observer } from "mobx-react";
|
||||
import useSWR from "swr";
|
||||
import { Loader } from "@plane/ui";
|
||||
// hooks
|
||||
import { useInstance } from "@/hooks/store";
|
||||
// components
|
||||
import { InstanceEmailForm } from "./email-config-form";
|
||||
|
||||
const InstanceEmailPage = observer(() => {
|
||||
// store
|
||||
const { fetchInstanceConfigurations, formattedConfig } = useInstance();
|
||||
|
||||
useSWR("INSTANCE_CONFIGURATIONS", () => fetchInstanceConfigurations());
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
|
||||
<div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
|
||||
<div className="text-xl font-medium text-custom-text-100">Secure emails from your own instance</div>
|
||||
<div className="text-sm font-normal text-custom-text-300">
|
||||
Plane can send useful emails to you and your users from your own instance without talking to the Internet.
|
||||
<div className="text-sm font-normal text-custom-text-300">
|
||||
Set it up below and please test your settings before you save them.
|
||||
<span className="text-red-400">Misconfigs can lead to email bounces and errors.</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
|
||||
{formattedConfig ? (
|
||||
<InstanceEmailForm config={formattedConfig} />
|
||||
) : (
|
||||
<Loader className="space-y-10">
|
||||
<Loader.Item height="50px" width="75%" />
|
||||
<Loader.Item height="50px" width="75%" />
|
||||
<Loader.Item height="50px" width="40%" />
|
||||
<Loader.Item height="50px" width="40%" />
|
||||
<Loader.Item height="50px" width="20%" />
|
||||
</Loader>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
});
|
||||
|
||||
export default InstanceEmailPage;
|
||||
@@ -1,155 +0,0 @@
|
||||
"use client";
|
||||
import { FC } from "react";
|
||||
import { observer } from "mobx-react";
|
||||
import { Controller, useForm } from "react-hook-form";
|
||||
import { Telescope } from "lucide-react";
|
||||
// types
|
||||
import { IInstance, IInstanceAdmin } from "@plane/types";
|
||||
// ui
|
||||
import { Button, Input, TOAST_TYPE, ToggleSwitch, setToast } from "@plane/ui";
|
||||
// components
|
||||
import { ControllerInput } from "@/components/common";
|
||||
import { useInstance } from "@/hooks/store";
|
||||
import { IntercomConfig } from "./intercom";
|
||||
// hooks
|
||||
|
||||
export interface IGeneralConfigurationForm {
|
||||
instance: IInstance;
|
||||
instanceAdmins: IInstanceAdmin[];
|
||||
}
|
||||
|
||||
export const GeneralConfigurationForm: FC<IGeneralConfigurationForm> = observer((props) => {
|
||||
const { instance, instanceAdmins } = props;
|
||||
// hooks
|
||||
const { instanceConfigurations, updateInstanceInfo, updateInstanceConfigurations } = useInstance();
|
||||
|
||||
// form data
|
||||
const {
|
||||
handleSubmit,
|
||||
control,
|
||||
watch,
|
||||
formState: { errors, isSubmitting },
|
||||
} = useForm<Partial<IInstance>>({
|
||||
defaultValues: {
|
||||
instance_name: instance?.instance_name,
|
||||
is_telemetry_enabled: instance?.is_telemetry_enabled,
|
||||
},
|
||||
});
|
||||
|
||||
const onSubmit = async (formData: Partial<IInstance>) => {
|
||||
const payload: Partial<IInstance> = { ...formData };
|
||||
|
||||
// update the intercom configuration
|
||||
const isIntercomEnabled =
|
||||
instanceConfigurations?.find((config) => config.key === "IS_INTERCOM_ENABLED")?.value === "1";
|
||||
if (!payload.is_telemetry_enabled && isIntercomEnabled) {
|
||||
try {
|
||||
await updateInstanceConfigurations({ IS_INTERCOM_ENABLED: "0" });
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
}
|
||||
}
|
||||
|
||||
await updateInstanceInfo(payload)
|
||||
.then(() =>
|
||||
setToast({
|
||||
type: TOAST_TYPE.SUCCESS,
|
||||
title: "Success",
|
||||
message: "Settings updated successfully",
|
||||
})
|
||||
)
|
||||
.catch((err) => console.error(err));
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="space-y-8">
|
||||
<div className="space-y-3">
|
||||
<div className="text-lg font-medium">Instance details</div>
|
||||
<div className="grid-col grid w-full grid-cols-1 items-center justify-between gap-8 md:grid-cols-2 lg:grid-cols-3">
|
||||
<ControllerInput
|
||||
key="instance_name"
|
||||
name="instance_name"
|
||||
control={control}
|
||||
type="text"
|
||||
label="Name of instance"
|
||||
placeholder="Instance name"
|
||||
error={Boolean(errors.instance_name)}
|
||||
required
|
||||
/>
|
||||
|
||||
<div className="flex flex-col gap-1">
|
||||
<h4 className="text-sm text-custom-text-300">Email</h4>
|
||||
<Input
|
||||
id="email"
|
||||
name="email"
|
||||
type="email"
|
||||
value={instanceAdmins[0]?.user_detail?.email ?? ""}
|
||||
placeholder="Admin email"
|
||||
className="w-full cursor-not-allowed !text-custom-text-400"
|
||||
autoComplete="on"
|
||||
disabled
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className="flex flex-col gap-1">
|
||||
<h4 className="text-sm text-custom-text-300">Instance ID</h4>
|
||||
<Input
|
||||
id="instance_id"
|
||||
name="instance_id"
|
||||
type="text"
|
||||
value={instance.instance_id}
|
||||
className="w-full cursor-not-allowed rounded-md font-medium !text-custom-text-400"
|
||||
disabled
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="space-y-3">
|
||||
<div className="text-lg font-medium">Chat + telemetry</div>
|
||||
<IntercomConfig isTelemetryEnabled={watch("is_telemetry_enabled") ?? false} />
|
||||
<div className="flex items-center gap-14 px-4 py-3 border border-custom-border-200 rounded">
|
||||
<div className="grow flex items-center gap-4">
|
||||
<div className="shrink-0">
|
||||
<div className="flex items-center justify-center w-10 h-10 bg-custom-background-80 rounded-full">
|
||||
<Telescope className="w-6 h-6 text-custom-text-300/80 p-0.5" />
|
||||
</div>
|
||||
</div>
|
||||
<div className="grow">
|
||||
<div className="text-sm font-medium text-custom-text-100 leading-5">
|
||||
Let Plane collect anonymous usage data
|
||||
</div>
|
||||
<div className="text-xs font-normal text-custom-text-300 leading-5">
|
||||
No PII is collected.This anonymized data is used to understand how you use Plane and build new features
|
||||
in line with{" "}
|
||||
<a
|
||||
href="https://developers.plane.so/self-hosting/telemetry"
|
||||
target="_blank"
|
||||
className="text-custom-primary-100 hover:underline"
|
||||
rel="noreferrer"
|
||||
>
|
||||
our Telemetry Policy.
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className={`shrink-0 ${isSubmitting && "opacity-70"}`}>
|
||||
<Controller
|
||||
control={control}
|
||||
name="is_telemetry_enabled"
|
||||
render={({ field: { value, onChange } }) => (
|
||||
<ToggleSwitch value={value ?? false} onChange={onChange} size="sm" disabled={isSubmitting} />
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<Button variant="primary" onClick={handleSubmit(onSubmit)} loading={isSubmitting}>
|
||||
{isSubmitting ? "Saving..." : "Save changes"}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
});
|
||||
@@ -1,11 +0,0 @@
|
||||
import { ReactNode } from "react";
|
||||
import { Metadata } from "next";
|
||||
import { AdminLayout } from "@/layouts/admin-layout";
|
||||
|
||||
export const metadata: Metadata = {
|
||||
title: "General Settings - Plane Web",
|
||||
};
|
||||
|
||||
export default function GeneralLayout({ children }: { children: ReactNode }) {
|
||||
return <AdminLayout>{children}</AdminLayout>;
|
||||
}
|
||||
@@ -1,80 +0,0 @@
|
||||
"use client";
|
||||
import { FC } from "react";
|
||||
import { useForm } from "react-hook-form";
|
||||
import { IFormattedInstanceConfiguration, TInstanceImageConfigurationKeys } from "@plane/types";
|
||||
import { Button, TOAST_TYPE, setToast } from "@plane/ui";
|
||||
// components
|
||||
import { ControllerInput } from "@/components/common";
|
||||
// hooks
|
||||
import { useInstance } from "@/hooks/store";
|
||||
|
||||
type IInstanceImageConfigForm = {
|
||||
config: IFormattedInstanceConfiguration;
|
||||
};
|
||||
|
||||
type ImageConfigFormValues = Record<TInstanceImageConfigurationKeys, string>;
|
||||
|
||||
export const InstanceImageConfigForm: FC<IInstanceImageConfigForm> = (props) => {
|
||||
const { config } = props;
|
||||
// store hooks
|
||||
const { updateInstanceConfigurations } = useInstance();
|
||||
// form data
|
||||
const {
|
||||
handleSubmit,
|
||||
control,
|
||||
formState: { errors, isSubmitting },
|
||||
} = useForm<ImageConfigFormValues>({
|
||||
defaultValues: {
|
||||
UNSPLASH_ACCESS_KEY: config["UNSPLASH_ACCESS_KEY"],
|
||||
},
|
||||
});
|
||||
|
||||
const onSubmit = async (formData: ImageConfigFormValues) => {
|
||||
const payload: Partial<ImageConfigFormValues> = { ...formData };
|
||||
|
||||
await updateInstanceConfigurations(payload)
|
||||
.then(() =>
|
||||
setToast({
|
||||
type: TOAST_TYPE.SUCCESS,
|
||||
title: "Success",
|
||||
message: "Image Configuration Settings updated successfully",
|
||||
})
|
||||
)
|
||||
.catch((err) => console.error(err));
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="space-y-8">
|
||||
<div className="grid-col grid w-full grid-cols-1 items-center justify-between gap-x-16 gap-y-8 lg:grid-cols-2">
|
||||
<ControllerInput
|
||||
control={control}
|
||||
type="password"
|
||||
name="UNSPLASH_ACCESS_KEY"
|
||||
label="Access key from your Unsplash account"
|
||||
description={
|
||||
<>
|
||||
You will find your access key in your Unsplash developer console.
|
||||
<a
|
||||
href="https://unsplash.com/documentation#creating-a-developer-account"
|
||||
target="_blank"
|
||||
className="text-custom-primary-100 hover:underline"
|
||||
rel="noreferrer"
|
||||
>
|
||||
Learn more.
|
||||
</a>
|
||||
</>
|
||||
}
|
||||
placeholder="oXgq-sdfadsaeweqasdfasdf3234234rassd"
|
||||
error={Boolean(errors.UNSPLASH_ACCESS_KEY)}
|
||||
required
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<Button variant="primary" onClick={handleSubmit(onSubmit)} loading={isSubmitting}>
|
||||
{isSubmitting ? "Saving..." : "Save changes"}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -1,15 +0,0 @@
|
||||
import { ReactNode } from "react";
|
||||
import { Metadata } from "next";
|
||||
import { AdminLayout } from "@/layouts/admin-layout";
|
||||
|
||||
interface ImageLayoutProps {
|
||||
children: ReactNode;
|
||||
}
|
||||
|
||||
export const metadata: Metadata = {
|
||||
title: "Images Settings - Plane Web",
|
||||
};
|
||||
|
||||
export default function ImageLayout({ children }: ImageLayoutProps) {
|
||||
return <AdminLayout>{children}</AdminLayout>;
|
||||
}
|
||||
@@ -1,47 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { ReactNode } from "react";
|
||||
import { ThemeProvider, useTheme } from "next-themes";
|
||||
import { SWRConfig } from "swr";
|
||||
// plane imports
|
||||
import { ADMIN_BASE_PATH, DEFAULT_SWR_CONFIG } from "@plane/constants";
|
||||
import { Toast } from "@plane/ui";
|
||||
import { resolveGeneralTheme } from "@plane/utils";
|
||||
// lib
|
||||
import { InstanceProvider } from "@/lib/instance-provider";
|
||||
import { StoreProvider } from "@/lib/store-provider";
|
||||
import { UserProvider } from "@/lib/user-provider";
|
||||
// styles
|
||||
import "@/styles/globals.css";
|
||||
|
||||
const ToastWithTheme = () => {
|
||||
const { resolvedTheme } = useTheme();
|
||||
return <Toast theme={resolveGeneralTheme(resolvedTheme)} />;
|
||||
};
|
||||
|
||||
export default function RootLayout({ children }: { children: ReactNode }) {
|
||||
const ASSET_PREFIX = ADMIN_BASE_PATH;
|
||||
return (
|
||||
<html lang="en">
|
||||
<head>
|
||||
<link rel="apple-touch-icon" sizes="180x180" href={`${ASSET_PREFIX}/favicon/apple-touch-icon.png`} />
|
||||
<link rel="icon" type="image/png" sizes="32x32" href={`${ASSET_PREFIX}/favicon/favicon-32x32.png`} />
|
||||
<link rel="icon" type="image/png" sizes="16x16" href={`${ASSET_PREFIX}/favicon/favicon-16x16.png`} />
|
||||
<link rel="manifest" href={`${ASSET_PREFIX}/site.webmanifest.json`} />
|
||||
<link rel="shortcut icon" href={`${ASSET_PREFIX}/favicon/favicon.ico`} />
|
||||
</head>
|
||||
<body className={`antialiased`}>
|
||||
<ThemeProvider themes={["light", "dark"]} defaultTheme="system" enableSystem>
|
||||
<ToastWithTheme />
|
||||
<SWRConfig value={DEFAULT_SWR_CONFIG}>
|
||||
<StoreProvider>
|
||||
<InstanceProvider>
|
||||
<UserProvider>{children}</UserProvider>
|
||||
</InstanceProvider>
|
||||
</StoreProvider>
|
||||
</SWRConfig>
|
||||
</ThemeProvider>
|
||||
</body>
|
||||
</html>
|
||||
);
|
||||
}
|
||||
@@ -1,30 +0,0 @@
|
||||
import { Metadata } from "next";
|
||||
// components
|
||||
import { InstanceSignInForm } from "@/components/login";
|
||||
// layouts
|
||||
import { DefaultLayout } from "@/layouts/default-layout";
|
||||
|
||||
export const metadata: Metadata = {
|
||||
title: "Plane | Simple, extensible, open-source project management tool.",
|
||||
description:
|
||||
"Open-source project management tool to manage work items, sprints, and product roadmaps with peace of mind.",
|
||||
openGraph: {
|
||||
title: "Plane | Simple, extensible, open-source project management tool.",
|
||||
description:
|
||||
"Open-source project management tool to manage work items, sprints, and product roadmaps with peace of mind.",
|
||||
url: "https://plane.so/",
|
||||
},
|
||||
keywords:
|
||||
"software development, customer feedback, software, accelerate, code management, release management, project management, work items tracking, agile, scrum, kanban, collaboration",
|
||||
twitter: {
|
||||
site: "@planepowers",
|
||||
},
|
||||
};
|
||||
|
||||
export default async function LoginPage() {
|
||||
return (
|
||||
<DefaultLayout>
|
||||
<InstanceSignInForm />
|
||||
</DefaultLayout>
|
||||
);
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
import { ReactNode } from "react";
|
||||
import { Metadata } from "next";
|
||||
// layouts
|
||||
import { AdminLayout } from "@/layouts/admin-layout";
|
||||
|
||||
export const metadata: Metadata = {
|
||||
title: "Workspace Management - Plane Web",
|
||||
};
|
||||
|
||||
export default function WorkspaceManagementLayout({ children }: { children: ReactNode }) {
|
||||
return <AdminLayout>{children}</AdminLayout>;
|
||||
}
|
||||
@@ -1,167 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { useState } from "react";
|
||||
import { observer } from "mobx-react";
|
||||
import Link from "next/link";
|
||||
import useSWR from "swr";
|
||||
import { Loader as LoaderIcon } from "lucide-react";
|
||||
// types
|
||||
import { TInstanceConfigurationKeys } from "@plane/types";
|
||||
import { Button, getButtonStyling, Loader, setPromiseToast, ToggleSwitch } from "@plane/ui";
|
||||
import { cn } from "@plane/utils";
|
||||
// components
|
||||
import { WorkspaceListItem } from "@/components/workspace";
|
||||
// hooks
|
||||
import { useInstance, useWorkspace } from "@/hooks/store";
|
||||
|
||||
const WorkspaceManagementPage = observer(() => {
|
||||
// states
|
||||
const [isSubmitting, setIsSubmitting] = useState<boolean>(false);
|
||||
// store
|
||||
const { formattedConfig, fetchInstanceConfigurations, updateInstanceConfigurations } = useInstance();
|
||||
const {
|
||||
workspaceIds,
|
||||
loader: workspaceLoader,
|
||||
paginationInfo,
|
||||
fetchWorkspaces,
|
||||
fetchNextWorkspaces,
|
||||
} = useWorkspace();
|
||||
// derived values
|
||||
const disableWorkspaceCreation = formattedConfig?.DISABLE_WORKSPACE_CREATION ?? "";
|
||||
const hasNextPage = paginationInfo?.next_page_results && paginationInfo?.next_cursor !== undefined;
|
||||
|
||||
// fetch data
|
||||
useSWR("INSTANCE_CONFIGURATIONS", () => fetchInstanceConfigurations());
|
||||
useSWR("INSTANCE_WORKSPACES", () => fetchWorkspaces());
|
||||
|
||||
const updateConfig = async (key: TInstanceConfigurationKeys, value: string) => {
|
||||
setIsSubmitting(true);
|
||||
|
||||
const payload = {
|
||||
[key]: value,
|
||||
};
|
||||
|
||||
const updateConfigPromise = updateInstanceConfigurations(payload);
|
||||
|
||||
setPromiseToast(updateConfigPromise, {
|
||||
loading: "Saving configuration",
|
||||
success: {
|
||||
title: "Success",
|
||||
message: () => "Configuration saved successfully",
|
||||
},
|
||||
error: {
|
||||
title: "Error",
|
||||
message: () => "Failed to save configuration",
|
||||
},
|
||||
});
|
||||
|
||||
await updateConfigPromise
|
||||
.then(() => {
|
||||
setIsSubmitting(false);
|
||||
})
|
||||
.catch((err) => {
|
||||
console.error(err);
|
||||
setIsSubmitting(false);
|
||||
});
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
|
||||
<div className="flex items-center justify-between gap-4 border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
|
||||
<div className="flex flex-col gap-1">
|
||||
<div className="text-xl font-medium text-custom-text-100">Workspaces on this instance</div>
|
||||
<div className="text-sm font-normal text-custom-text-300">
|
||||
See all workspaces and control who can create them.
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
|
||||
<div className="space-y-3">
|
||||
{formattedConfig ? (
|
||||
<div className={cn("w-full flex items-center gap-14 rounded")}>
|
||||
<div className="flex grow items-center gap-4">
|
||||
<div className="grow">
|
||||
<div className="text-lg font-medium pb-1">Prevent anyone else from creating a workspace.</div>
|
||||
<div className={cn("font-normal leading-5 text-custom-text-300 text-xs")}>
|
||||
Toggling this on will let only you create workspaces. You will have to invite users to new
|
||||
workspaces.
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className={`shrink-0 pr-4 ${isSubmitting && "opacity-70"}`}>
|
||||
<div className="flex items-center gap-4">
|
||||
<ToggleSwitch
|
||||
value={Boolean(parseInt(disableWorkspaceCreation))}
|
||||
onChange={() => {
|
||||
if (Boolean(parseInt(disableWorkspaceCreation)) === true) {
|
||||
updateConfig("DISABLE_WORKSPACE_CREATION", "0");
|
||||
} else {
|
||||
updateConfig("DISABLE_WORKSPACE_CREATION", "1");
|
||||
}
|
||||
}}
|
||||
size="sm"
|
||||
disabled={isSubmitting}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<Loader>
|
||||
<Loader.Item height="50px" width="100%" />
|
||||
</Loader>
|
||||
)}
|
||||
{workspaceLoader !== "init-loader" ? (
|
||||
<>
|
||||
<div className="pt-6 flex items-center justify-between gap-2">
|
||||
<div className="flex flex-col items-start gap-x-2">
|
||||
<div className="flex items-center gap-2 text-lg font-medium">
|
||||
All workspaces on this instance{" "}
|
||||
<span className="text-custom-text-300">• {workspaceIds.length}</span>
|
||||
{workspaceLoader && ["mutation", "pagination"].includes(workspaceLoader) && (
|
||||
<LoaderIcon className="w-4 h-4 animate-spin" />
|
||||
)}
|
||||
</div>
|
||||
<div className={cn("font-normal leading-5 text-custom-text-300 text-xs")}>
|
||||
You can't yet delete workspaces and you can only go to the workspace if you are an Admin or a
|
||||
Member.
|
||||
</div>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<Link href="/workspace/create" className={getButtonStyling("primary", "sm")}>
|
||||
Create workspace
|
||||
</Link>
|
||||
</div>
|
||||
</div>
|
||||
<div className="flex flex-col gap-4 py-2">
|
||||
{workspaceIds.map((workspaceId) => (
|
||||
<WorkspaceListItem key={workspaceId} workspaceId={workspaceId} />
|
||||
))}
|
||||
</div>
|
||||
{hasNextPage && (
|
||||
<div className="flex justify-center">
|
||||
<Button
|
||||
variant="link-primary"
|
||||
onClick={() => fetchNextWorkspaces()}
|
||||
disabled={workspaceLoader === "pagination"}
|
||||
>
|
||||
Load more
|
||||
{workspaceLoader === "pagination" && <LoaderIcon className="w-3 h-3 animate-spin" />}
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
) : (
|
||||
<Loader className="space-y-10 py-8">
|
||||
<Loader.Item height="24px" width="20%" />
|
||||
<Loader.Item height="92px" width="100%" />
|
||||
<Loader.Item height="92px" width="100%" />
|
||||
<Loader.Item height="92px" width="100%" />
|
||||
</Loader>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
});
|
||||
|
||||
export default WorkspaceManagementPage;
|
||||
@@ -1,70 +0,0 @@
|
||||
import { observer } from "mobx-react";
|
||||
import Image from "next/image";
|
||||
import { useTheme } from "next-themes";
|
||||
// types
|
||||
import {
|
||||
TGetBaseAuthenticationModeProps,
|
||||
TInstanceAuthenticationMethodKeys,
|
||||
TInstanceAuthenticationModes,
|
||||
} from "@plane/types";
|
||||
// components
|
||||
import { AuthenticationMethodCard } from "@/components/authentication";
|
||||
// helpers
|
||||
import { getBaseAuthenticationModes } from "@/lib/auth-helpers";
|
||||
// plane admin components
|
||||
import { UpgradeButton } from "@/plane-admin/components/common";
|
||||
// images
|
||||
import OIDCLogo from "@/public/logos/oidc-logo.svg";
|
||||
import SAMLLogo from "@/public/logos/saml-logo.svg";
|
||||
|
||||
export type TAuthenticationModeProps = {
|
||||
disabled: boolean;
|
||||
updateConfig: (key: TInstanceAuthenticationMethodKeys, value: string) => void;
|
||||
};
|
||||
|
||||
// Authentication methods
|
||||
export const getAuthenticationModes: (props: TGetBaseAuthenticationModeProps) => TInstanceAuthenticationModes[] = ({
|
||||
disabled,
|
||||
updateConfig,
|
||||
resolvedTheme,
|
||||
}) => [
|
||||
...getBaseAuthenticationModes({ disabled, updateConfig, resolvedTheme }),
|
||||
{
|
||||
key: "oidc",
|
||||
name: "OIDC",
|
||||
description: "Authenticate your users via the OpenID Connect protocol.",
|
||||
icon: <Image src={OIDCLogo} height={22} width={22} alt="OIDC Logo" />,
|
||||
config: <UpgradeButton />,
|
||||
unavailable: true,
|
||||
},
|
||||
{
|
||||
key: "saml",
|
||||
name: "SAML",
|
||||
description: "Authenticate your users via the Security Assertion Markup Language protocol.",
|
||||
icon: <Image src={SAMLLogo} height={22} width={22} alt="SAML Logo" className="pl-0.5" />,
|
||||
config: <UpgradeButton />,
|
||||
unavailable: true,
|
||||
},
|
||||
];
|
||||
|
||||
export const AuthenticationModes: React.FC<TAuthenticationModeProps> = observer((props) => {
|
||||
const { disabled, updateConfig } = props;
|
||||
// next-themes
|
||||
const { resolvedTheme } = useTheme();
|
||||
|
||||
return (
|
||||
<>
|
||||
{getAuthenticationModes({ disabled, updateConfig, resolvedTheme }).map((method) => (
|
||||
<AuthenticationMethodCard
|
||||
key={method.key}
|
||||
name={method.name}
|
||||
description={method.description}
|
||||
icon={method.icon}
|
||||
config={method.config}
|
||||
disabled={disabled}
|
||||
unavailable={method.unavailable}
|
||||
/>
|
||||
))}
|
||||
</>
|
||||
);
|
||||
});
|
||||
@@ -1,139 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { FC, useState, useRef } from "react";
|
||||
import { observer } from "mobx-react";
|
||||
import Link from "next/link";
|
||||
import { ExternalLink, FileText, HelpCircle, MoveLeft } from "lucide-react";
|
||||
import { Transition } from "@headlessui/react";
|
||||
// plane internal packages
|
||||
import { WEB_BASE_URL } from "@plane/constants";
|
||||
import { DiscordIcon, GithubIcon, Tooltip } from "@plane/ui";
|
||||
import { cn } from "@plane/utils";
|
||||
// hooks
|
||||
import { useTheme } from "@/hooks/store";
|
||||
// assets
|
||||
// eslint-disable-next-line import/order
|
||||
import packageJson from "package.json";
|
||||
|
||||
const helpOptions = [
|
||||
{
|
||||
name: "Documentation",
|
||||
href: "https://docs.plane.so/",
|
||||
Icon: FileText,
|
||||
},
|
||||
{
|
||||
name: "Join our Discord",
|
||||
href: "https://discord.com/invite/A92xrEGCge",
|
||||
Icon: DiscordIcon,
|
||||
},
|
||||
{
|
||||
name: "Report a bug",
|
||||
href: "https://github.com/makeplane/plane/issues/new/choose",
|
||||
Icon: GithubIcon,
|
||||
},
|
||||
];
|
||||
|
||||
export const HelpSection: FC = observer(() => {
|
||||
// states
|
||||
const [isNeedHelpOpen, setIsNeedHelpOpen] = useState(false);
|
||||
// store
|
||||
const { isSidebarCollapsed, toggleSidebar } = useTheme();
|
||||
// refs
|
||||
const helpOptionsRef = useRef<HTMLDivElement | null>(null);
|
||||
|
||||
const redirectionLink = encodeURI(WEB_BASE_URL + "/");
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
"flex w-full items-center justify-between gap-1 self-baseline border-t border-custom-border-200 bg-custom-sidebar-background-100 px-4 h-14 flex-shrink-0",
|
||||
{
|
||||
"flex-col h-auto py-1.5": isSidebarCollapsed,
|
||||
}
|
||||
)}
|
||||
>
|
||||
<div className={`flex items-center gap-1 ${isSidebarCollapsed ? "flex-col justify-center" : "w-full"}`}>
|
||||
<Tooltip tooltipContent="Redirect to Plane" position="right" className="ml-4" disabled={!isSidebarCollapsed}>
|
||||
<a
|
||||
href={redirectionLink}
|
||||
className={`relative px-2 py-1.5 flex items-center gap-2 font-medium rounded border border-custom-primary-100/20 bg-custom-primary-100/10 text-xs text-custom-primary-200 whitespace-nowrap`}
|
||||
>
|
||||
<ExternalLink size={14} />
|
||||
{!isSidebarCollapsed && "Redirect to Plane"}
|
||||
</a>
|
||||
</Tooltip>
|
||||
<Tooltip tooltipContent="Help" position={isSidebarCollapsed ? "right" : "top"} className="ml-4">
|
||||
<button
|
||||
type="button"
|
||||
className={`ml-auto grid place-items-center rounded-md p-1.5 text-custom-text-200 outline-none hover:bg-custom-background-90 hover:text-custom-text-100 ${
|
||||
isSidebarCollapsed ? "w-full" : ""
|
||||
}`}
|
||||
onClick={() => setIsNeedHelpOpen((prev) => !prev)}
|
||||
>
|
||||
<HelpCircle className="h-3.5 w-3.5" />
|
||||
</button>
|
||||
</Tooltip>
|
||||
<Tooltip tooltipContent="Toggle sidebar" position={isSidebarCollapsed ? "right" : "top"} className="ml-4">
|
||||
<button
|
||||
type="button"
|
||||
className={`grid place-items-center rounded-md p-1.5 text-custom-text-200 outline-none hover:bg-custom-background-90 hover:text-custom-text-100 ${
|
||||
isSidebarCollapsed ? "w-full" : ""
|
||||
}`}
|
||||
onClick={() => toggleSidebar(!isSidebarCollapsed)}
|
||||
>
|
||||
<MoveLeft className={`h-3.5 w-3.5 duration-300 ${isSidebarCollapsed ? "rotate-180" : ""}`} />
|
||||
</button>
|
||||
</Tooltip>
|
||||
</div>
|
||||
|
||||
<div className="relative">
|
||||
<Transition
|
||||
show={isNeedHelpOpen}
|
||||
enter="transition ease-out duration-100"
|
||||
enterFrom="transform opacity-0 scale-95"
|
||||
enterTo="transform opacity-100 scale-100"
|
||||
leave="transition ease-in duration-75"
|
||||
leaveFrom="transform opacity-100 scale-100"
|
||||
leaveTo="transform opacity-0 scale-95"
|
||||
>
|
||||
<div
|
||||
className={`absolute bottom-2 min-w-[10rem] z-[15] ${
|
||||
isSidebarCollapsed ? "left-full" : "-left-[75px]"
|
||||
} divide-y divide-custom-border-200 whitespace-nowrap rounded bg-custom-background-100 p-1 shadow-custom-shadow-xs`}
|
||||
ref={helpOptionsRef}
|
||||
>
|
||||
<div className="space-y-1 pb-2">
|
||||
{helpOptions.map(({ name, Icon, href }) => {
|
||||
if (href)
|
||||
return (
|
||||
<Link href={href} key={name} target="_blank">
|
||||
<div className="flex items-center gap-x-2 rounded px-2 py-1 text-xs hover:bg-custom-background-80">
|
||||
<div className="grid flex-shrink-0 place-items-center">
|
||||
<Icon className="h-3.5 w-3.5 text-custom-text-200" size={14} />
|
||||
</div>
|
||||
<span className="text-xs">{name}</span>
|
||||
</div>
|
||||
</Link>
|
||||
);
|
||||
else
|
||||
return (
|
||||
<button
|
||||
key={name}
|
||||
type="button"
|
||||
className="flex w-full items-center gap-x-2 rounded px-2 py-1 text-xs hover:bg-custom-background-80"
|
||||
>
|
||||
<div className="grid flex-shrink-0 place-items-center">
|
||||
<Icon className="h-3.5 w-3.5 text-custom-text-200" />
|
||||
</div>
|
||||
<span className="text-xs">{name}</span>
|
||||
</button>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
<div className="px-2 pb-1 pt-2 text-[10px]">Version: v{packageJson.version}</div>
|
||||
</div>
|
||||
</Transition>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
});
|
||||
@@ -1,5 +0,0 @@
|
||||
export * from "./root";
|
||||
export * from "./help-section";
|
||||
export * from "./sidebar-menu";
|
||||
export * from "./sidebar-dropdown";
|
||||
export * from "./sidebar-menu-hamburger-toogle";
|
||||
@@ -1,56 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { FC, useEffect, useRef } from "react";
|
||||
import { observer } from "mobx-react";
|
||||
// plane helpers
|
||||
import { useOutsideClickDetector } from "@plane/hooks";
|
||||
// components
|
||||
import { HelpSection, SidebarMenu, SidebarDropdown } from "@/components/admin-sidebar";
|
||||
// hooks
|
||||
import { useTheme } from "@/hooks/store";
|
||||
|
||||
export const InstanceSidebar: FC = observer(() => {
|
||||
// store
|
||||
const { isSidebarCollapsed, toggleSidebar } = useTheme();
|
||||
|
||||
const ref = useRef<HTMLDivElement>(null);
|
||||
|
||||
useOutsideClickDetector(ref, () => {
|
||||
if (isSidebarCollapsed === false) {
|
||||
if (window.innerWidth < 768) {
|
||||
toggleSidebar(!isSidebarCollapsed);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
const handleResize = () => {
|
||||
if (window.innerWidth <= 768) {
|
||||
toggleSidebar(true);
|
||||
}
|
||||
};
|
||||
handleResize();
|
||||
window.addEventListener("resize", handleResize);
|
||||
return () => {
|
||||
window.removeEventListener("resize", handleResize);
|
||||
};
|
||||
}, [toggleSidebar]);
|
||||
|
||||
return (
|
||||
<div
|
||||
className={`inset-y-0 z-20 flex h-full flex-shrink-0 flex-grow-0 flex-col border-r border-custom-sidebar-border-200 bg-custom-sidebar-background-100 duration-300
|
||||
fixed md:relative
|
||||
${isSidebarCollapsed ? "-ml-[290px]" : ""}
|
||||
sm:${isSidebarCollapsed ? "-ml-[290px]" : ""}
|
||||
md:ml-0 ${isSidebarCollapsed ? "w-[70px]" : "w-[290px]"}
|
||||
lg:ml-0 ${isSidebarCollapsed ? "w-[70px]" : "w-[290px]"}
|
||||
`}
|
||||
>
|
||||
<div ref={ref} className="flex h-full w-full flex-1 flex-col">
|
||||
<SidebarDropdown />
|
||||
<SidebarMenu />
|
||||
<HelpSection />
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
});
|
||||
@@ -1,20 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { FC } from "react";
|
||||
import { observer } from "mobx-react";
|
||||
// hooks
|
||||
import { Menu } from "lucide-react";
|
||||
import { useTheme } from "@/hooks/store";
|
||||
// icons
|
||||
|
||||
export const SidebarHamburgerToggle: FC = observer(() => {
|
||||
const { isSidebarCollapsed, toggleSidebar } = useTheme();
|
||||
return (
|
||||
<div
|
||||
className="w-7 h-7 rounded flex justify-center items-center bg-custom-background-80 transition-all hover:bg-custom-background-90 cursor-pointer group md:hidden"
|
||||
onClick={() => toggleSidebar(!isSidebarCollapsed)}
|
||||
>
|
||||
<Menu size={14} className="text-custom-text-200 group-hover:text-custom-text-100 transition-all" />
|
||||
</div>
|
||||
);
|
||||
});
|
||||
@@ -1,96 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { FC } from "react";
|
||||
import { observer } from "mobx-react";
|
||||
import { usePathname } from "next/navigation";
|
||||
// mobx
|
||||
// ui
|
||||
import { Settings } from "lucide-react";
|
||||
// icons
|
||||
import { Breadcrumbs } from "@plane/ui";
|
||||
// components
|
||||
import { SidebarHamburgerToggle } from "@/components/admin-sidebar";
|
||||
import { BreadcrumbLink } from "@/components/common";
|
||||
|
||||
export const InstanceHeader: FC = observer(() => {
|
||||
const pathName = usePathname();
|
||||
|
||||
const getHeaderTitle = (pathName: string) => {
|
||||
switch (pathName) {
|
||||
case "general":
|
||||
return "General";
|
||||
case "ai":
|
||||
return "Artificial Intelligence";
|
||||
case "email":
|
||||
return "Email";
|
||||
case "authentication":
|
||||
return "Authentication";
|
||||
case "image":
|
||||
return "Image";
|
||||
case "google":
|
||||
return "Google";
|
||||
case "github":
|
||||
return "GitHub";
|
||||
case "gitlab":
|
||||
return "GitLab";
|
||||
case "workspace":
|
||||
return "Workspace";
|
||||
case "create":
|
||||
return "Create";
|
||||
default:
|
||||
return pathName.toUpperCase();
|
||||
}
|
||||
};
|
||||
|
||||
// Function to dynamically generate breadcrumb items based on pathname
|
||||
const generateBreadcrumbItems = (pathname: string) => {
|
||||
const pathSegments = pathname.split("/").slice(1); // removing the first empty string.
|
||||
pathSegments.pop();
|
||||
|
||||
let currentUrl = "";
|
||||
const breadcrumbItems = pathSegments.map((segment) => {
|
||||
currentUrl += "/" + segment;
|
||||
return {
|
||||
title: getHeaderTitle(segment),
|
||||
href: currentUrl,
|
||||
};
|
||||
});
|
||||
return breadcrumbItems;
|
||||
};
|
||||
|
||||
const breadcrumbItems = generateBreadcrumbItems(pathName);
|
||||
|
||||
return (
|
||||
<div className="relative z-10 flex h-[3.75rem] w-full flex-shrink-0 flex-row items-center justify-between gap-x-2 gap-y-4 border-b border-custom-sidebar-border-200 bg-custom-sidebar-background-100 p-4">
|
||||
<div className="flex w-full flex-grow items-center gap-2 overflow-ellipsis whitespace-nowrap">
|
||||
<SidebarHamburgerToggle />
|
||||
{breadcrumbItems.length >= 0 && (
|
||||
<div>
|
||||
<Breadcrumbs>
|
||||
<Breadcrumbs.BreadcrumbItem
|
||||
type="text"
|
||||
link={
|
||||
<BreadcrumbLink
|
||||
href="/general/"
|
||||
label="Settings"
|
||||
icon={<Settings className="h-4 w-4 text-custom-text-300" />}
|
||||
/>
|
||||
}
|
||||
/>
|
||||
{breadcrumbItems.map(
|
||||
(item) =>
|
||||
item.title && (
|
||||
<Breadcrumbs.BreadcrumbItem
|
||||
key={item.title}
|
||||
type="text"
|
||||
link={<BreadcrumbLink href={item.href} label={item.title} />}
|
||||
/>
|
||||
)
|
||||
)}
|
||||
</Breadcrumbs>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
});
|
||||
@@ -1,7 +0,0 @@
|
||||
export * from "./auth-banner";
|
||||
export * from "./email-config-switch";
|
||||
export * from "./password-config-switch";
|
||||
export * from "./authentication-method-card";
|
||||
export * from "./gitlab-config";
|
||||
export * from "./github-config";
|
||||
export * from "./google-config";
|
||||
@@ -1,10 +0,0 @@
|
||||
export * from "./breadcrumb-link";
|
||||
export * from "./confirm-discard-modal";
|
||||
export * from "./controller-input";
|
||||
export * from "./copy-field";
|
||||
export * from "./password-strength-meter";
|
||||
export * from "./banner";
|
||||
export * from "./empty-state";
|
||||
export * from "./logo-spinner";
|
||||
export * from "./page-header";
|
||||
export * from "./code-block";
|
||||
@@ -1,89 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { FC, useMemo } from "react";
|
||||
// plane internal packages
|
||||
import { E_PASSWORD_STRENGTH } from "@plane/constants";
|
||||
import { cn, getPasswordStrength } from "@plane/utils";
|
||||
|
||||
type TPasswordStrengthMeter = {
|
||||
password: string;
|
||||
isFocused?: boolean;
|
||||
};
|
||||
|
||||
export const PasswordStrengthMeter: FC<TPasswordStrengthMeter> = (props) => {
|
||||
const { password, isFocused = false } = props;
|
||||
// derived values
|
||||
const strength = useMemo(() => getPasswordStrength(password), [password]);
|
||||
const strengthBars = useMemo(() => {
|
||||
switch (strength) {
|
||||
case E_PASSWORD_STRENGTH.EMPTY: {
|
||||
return {
|
||||
bars: [`bg-custom-text-100`, `bg-custom-text-100`, `bg-custom-text-100`],
|
||||
text: "Please enter your password.",
|
||||
textColor: "text-custom-text-100",
|
||||
};
|
||||
}
|
||||
case E_PASSWORD_STRENGTH.LENGTH_NOT_VALID: {
|
||||
return {
|
||||
bars: [`bg-red-500`, `bg-custom-text-100`, `bg-custom-text-100`],
|
||||
text: "Password length should me more than 8 characters.",
|
||||
textColor: "text-red-500",
|
||||
};
|
||||
}
|
||||
case E_PASSWORD_STRENGTH.STRENGTH_NOT_VALID: {
|
||||
return {
|
||||
bars: [`bg-red-500`, `bg-custom-text-100`, `bg-custom-text-100`],
|
||||
text: "Password is weak.",
|
||||
textColor: "text-red-500",
|
||||
};
|
||||
}
|
||||
case E_PASSWORD_STRENGTH.STRENGTH_VALID: {
|
||||
return {
|
||||
bars: [`bg-green-500`, `bg-green-500`, `bg-green-500`],
|
||||
text: "Password is strong.",
|
||||
textColor: "text-green-500",
|
||||
};
|
||||
}
|
||||
default: {
|
||||
return {
|
||||
bars: [`bg-custom-text-100`, `bg-custom-text-100`, `bg-custom-text-100`],
|
||||
text: "Please enter your password.",
|
||||
textColor: "text-custom-text-100",
|
||||
};
|
||||
}
|
||||
}
|
||||
}, [strength]);
|
||||
|
||||
const isPasswordMeterVisible = isFocused ? true : strength === E_PASSWORD_STRENGTH.STRENGTH_VALID ? false : true;
|
||||
|
||||
if (!isPasswordMeterVisible) return <></>;
|
||||
return (
|
||||
<div className="w-full space-y-2 pt-2">
|
||||
<div className="space-y-1.5">
|
||||
<div className="relative flex items-center gap-2">
|
||||
{strengthBars?.bars.map((color, index) => (
|
||||
<div key={`${color}-${index}`} className={cn("w-full h-1 rounded-full", color)} />
|
||||
))}
|
||||
</div>
|
||||
<div className={cn(`text-xs font-medium text-custom-text-100`, strengthBars?.textColor)}>
|
||||
{strengthBars?.text}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* <div className="relative flex flex-wrap gap-x-4 gap-y-2">
|
||||
{PASSWORD_CRITERIA.map((criteria) => (
|
||||
<div
|
||||
key={criteria.key}
|
||||
className={cn(
|
||||
"relative flex items-center gap-1 text-xs",
|
||||
criteria.isCriteriaValid(password) ? `text-green-500/70` : "text-custom-text-300"
|
||||
)}
|
||||
>
|
||||
<CircleCheck width={14} height={14} />
|
||||
{criteria.label}
|
||||
</div>
|
||||
))}
|
||||
</div> */}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -1,3 +0,0 @@
|
||||
export * from "./instance-not-ready";
|
||||
export * from "./instance-failure-view";
|
||||
export * from "./setup-form";
|
||||
@@ -1 +0,0 @@
|
||||
export * from "./sign-in-form";
|
||||
@@ -1,194 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { FC, useEffect, useMemo, useState } from "react";
|
||||
import { useSearchParams } from "next/navigation";
|
||||
import { Eye, EyeOff } from "lucide-react";
|
||||
// plane internal packages
|
||||
import { API_BASE_URL, EAdminAuthErrorCodes, TAuthErrorInfo } from "@plane/constants";
|
||||
import { AuthService } from "@plane/services";
|
||||
import { Button, Input, Spinner } from "@plane/ui";
|
||||
// components
|
||||
import { Banner } from "@/components/common";
|
||||
// helpers
|
||||
import { authErrorHandler } from "@/lib/auth-helpers";
|
||||
// local components
|
||||
import { AuthBanner } from "../authentication";
|
||||
|
||||
// service initialization
|
||||
const authService = new AuthService();
|
||||
|
||||
// error codes
|
||||
enum EErrorCodes {
|
||||
INSTANCE_NOT_CONFIGURED = "INSTANCE_NOT_CONFIGURED",
|
||||
REQUIRED_EMAIL_PASSWORD = "REQUIRED_EMAIL_PASSWORD",
|
||||
INVALID_EMAIL = "INVALID_EMAIL",
|
||||
USER_DOES_NOT_EXIST = "USER_DOES_NOT_EXIST",
|
||||
AUTHENTICATION_FAILED = "AUTHENTICATION_FAILED",
|
||||
}
|
||||
|
||||
type TError = {
|
||||
type: EErrorCodes | undefined;
|
||||
message: string | undefined;
|
||||
};
|
||||
|
||||
// form data
|
||||
type TFormData = {
|
||||
email: string;
|
||||
password: string;
|
||||
};
|
||||
|
||||
const defaultFromData: TFormData = {
|
||||
email: "",
|
||||
password: "",
|
||||
};
|
||||
|
||||
export const InstanceSignInForm: FC = (props) => {
|
||||
const {} = props;
|
||||
// search params
|
||||
const searchParams = useSearchParams();
|
||||
const emailParam = searchParams.get("email") || undefined;
|
||||
const errorCode = searchParams.get("error_code") || undefined;
|
||||
const errorMessage = searchParams.get("error_message") || undefined;
|
||||
// state
|
||||
const [showPassword, setShowPassword] = useState(false);
|
||||
const [csrfToken, setCsrfToken] = useState<string | undefined>(undefined);
|
||||
const [formData, setFormData] = useState<TFormData>(defaultFromData);
|
||||
const [isSubmitting, setIsSubmitting] = useState(false);
|
||||
const [errorInfo, setErrorInfo] = useState<TAuthErrorInfo | undefined>(undefined);
|
||||
|
||||
const handleFormChange = (key: keyof TFormData, value: string | boolean) =>
|
||||
setFormData((prev) => ({ ...prev, [key]: value }));
|
||||
|
||||
useEffect(() => {
|
||||
if (csrfToken === undefined)
|
||||
authService.requestCSRFToken().then((data) => data?.csrf_token && setCsrfToken(data.csrf_token));
|
||||
}, [csrfToken]);
|
||||
|
||||
useEffect(() => {
|
||||
if (emailParam) setFormData((prev) => ({ ...prev, email: emailParam }));
|
||||
}, [emailParam]);
|
||||
|
||||
// derived values
|
||||
const errorData: TError = useMemo(() => {
|
||||
if (errorCode && errorMessage) {
|
||||
switch (errorCode) {
|
||||
case EErrorCodes.INSTANCE_NOT_CONFIGURED:
|
||||
return { type: EErrorCodes.INVALID_EMAIL, message: errorMessage };
|
||||
case EErrorCodes.REQUIRED_EMAIL_PASSWORD:
|
||||
return { type: EErrorCodes.REQUIRED_EMAIL_PASSWORD, message: errorMessage };
|
||||
case EErrorCodes.INVALID_EMAIL:
|
||||
return { type: EErrorCodes.INVALID_EMAIL, message: errorMessage };
|
||||
case EErrorCodes.USER_DOES_NOT_EXIST:
|
||||
return { type: EErrorCodes.USER_DOES_NOT_EXIST, message: errorMessage };
|
||||
case EErrorCodes.AUTHENTICATION_FAILED:
|
||||
return { type: EErrorCodes.AUTHENTICATION_FAILED, message: errorMessage };
|
||||
default:
|
||||
return { type: undefined, message: undefined };
|
||||
}
|
||||
} else return { type: undefined, message: undefined };
|
||||
}, [errorCode, errorMessage]);
|
||||
|
||||
const isButtonDisabled = useMemo(
|
||||
() => (!isSubmitting && formData.email && formData.password ? false : true),
|
||||
[formData.email, formData.password, isSubmitting]
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
if (errorCode) {
|
||||
const errorDetail = authErrorHandler(errorCode?.toString() as EAdminAuthErrorCodes);
|
||||
if (errorDetail) {
|
||||
setErrorInfo(errorDetail);
|
||||
}
|
||||
}
|
||||
}, [errorCode]);
|
||||
|
||||
return (
|
||||
<div className="flex-grow container mx-auto max-w-lg px-10 lg:max-w-md lg:px-5 py-10 lg:pt-28 transition-all">
|
||||
<div className="relative flex flex-col space-y-6">
|
||||
<div className="text-center space-y-1">
|
||||
<h3 className="flex gap-4 justify-center text-3xl font-bold text-onboarding-text-100">
|
||||
Manage your Plane instance
|
||||
</h3>
|
||||
<p className="font-medium text-onboarding-text-400">
|
||||
Configure instance-wide settings to secure your instance
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{errorData.type && errorData?.message ? (
|
||||
<Banner type="error" message={errorData?.message} />
|
||||
) : (
|
||||
<>{errorInfo && <AuthBanner bannerData={errorInfo} handleBannerData={(value) => setErrorInfo(value)} />}</>
|
||||
)}
|
||||
|
||||
<form
|
||||
className="space-y-4"
|
||||
method="POST"
|
||||
action={`${API_BASE_URL}/api/instances/admins/sign-in/`}
|
||||
onSubmit={() => setIsSubmitting(true)}
|
||||
onError={() => setIsSubmitting(false)}
|
||||
>
|
||||
<input type="hidden" name="csrfmiddlewaretoken" value={csrfToken} />
|
||||
|
||||
<div className="w-full space-y-1">
|
||||
<label className="text-sm text-onboarding-text-300 font-medium" htmlFor="email">
|
||||
Email <span className="text-red-500">*</span>
|
||||
</label>
|
||||
<Input
|
||||
className="w-full border border-onboarding-border-100 !bg-onboarding-background-200 placeholder:text-onboarding-text-400"
|
||||
id="email"
|
||||
name="email"
|
||||
type="email"
|
||||
inputSize="md"
|
||||
placeholder="name@company.com"
|
||||
value={formData.email}
|
||||
onChange={(e) => handleFormChange("email", e.target.value)}
|
||||
autoComplete="on"
|
||||
autoFocus
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className="w-full space-y-1">
|
||||
<label className="text-sm text-onboarding-text-300 font-medium" htmlFor="password">
|
||||
Password <span className="text-red-500">*</span>
|
||||
</label>
|
||||
<div className="relative">
|
||||
<Input
|
||||
className="w-full border border-onboarding-border-100 !bg-onboarding-background-200 placeholder:text-onboarding-text-400"
|
||||
id="password"
|
||||
name="password"
|
||||
type={showPassword ? "text" : "password"}
|
||||
inputSize="md"
|
||||
placeholder="Enter your password"
|
||||
value={formData.password}
|
||||
onChange={(e) => handleFormChange("password", e.target.value)}
|
||||
autoComplete="on"
|
||||
/>
|
||||
{showPassword ? (
|
||||
<button
|
||||
type="button"
|
||||
className="absolute right-3 top-3.5 flex items-center justify-center text-custom-text-400"
|
||||
onClick={() => setShowPassword(false)}
|
||||
>
|
||||
<EyeOff className="h-4 w-4" />
|
||||
</button>
|
||||
) : (
|
||||
<button
|
||||
type="button"
|
||||
className="absolute right-3 top-3.5 flex items-center justify-center text-custom-text-400"
|
||||
onClick={() => setShowPassword(true)}
|
||||
>
|
||||
<Eye className="h-4 w-4" />
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
<div className="py-2">
|
||||
<Button type="submit" size="lg" className="w-full" disabled={isButtonDisabled}>
|
||||
{isSubmitting ? <Spinner height="20px" width="20px" /> : "Sign in"}
|
||||
</Button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -1 +0,0 @@
|
||||
export * from "./list-item";
|
||||
@@ -1,48 +0,0 @@
|
||||
"use client";
|
||||
import { FC, ReactNode, useEffect } from "react";
|
||||
import { observer } from "mobx-react";
|
||||
import { useRouter } from "next/navigation";
|
||||
// components
|
||||
import { InstanceSidebar } from "@/components/admin-sidebar";
|
||||
import { InstanceHeader } from "@/components/auth-header";
|
||||
import { LogoSpinner } from "@/components/common";
|
||||
import { NewUserPopup } from "@/components/new-user-popup";
|
||||
// hooks
|
||||
import { useUser } from "@/hooks/store";
|
||||
|
||||
type TAdminLayout = {
|
||||
children: ReactNode;
|
||||
};
|
||||
|
||||
export const AdminLayout: FC<TAdminLayout> = observer((props) => {
|
||||
const { children } = props;
|
||||
// router
|
||||
const router = useRouter();
|
||||
// store hooks
|
||||
const { isUserLoggedIn } = useUser();
|
||||
|
||||
useEffect(() => {
|
||||
if (isUserLoggedIn === false) {
|
||||
router.push("/");
|
||||
}
|
||||
}, [router, isUserLoggedIn]);
|
||||
|
||||
if (isUserLoggedIn === undefined) {
|
||||
return (
|
||||
<div className="relative flex h-screen w-full items-center justify-center">
|
||||
<LogoSpinner />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="relative flex h-screen w-screen overflow-hidden">
|
||||
<InstanceSidebar />
|
||||
<main className="relative flex h-full w-full flex-col overflow-hidden bg-custom-background-100">
|
||||
<InstanceHeader />
|
||||
<div className="h-full w-full overflow-hidden">{children}</div>
|
||||
</main>
|
||||
<NewUserPopup />
|
||||
</div>
|
||||
);
|
||||
});
|
||||
@@ -1,45 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { FC, ReactNode } from "react";
|
||||
import Image from "next/image";
|
||||
import Link from "next/link";
|
||||
import { useTheme } from "next-themes";
|
||||
// logo/ images
|
||||
import PlaneBackgroundPatternDark from "public/auth/background-pattern-dark.svg";
|
||||
import PlaneBackgroundPattern from "public/auth/background-pattern.svg";
|
||||
import BlackHorizontalLogo from "public/plane-logos/black-horizontal-with-blue-logo.png";
|
||||
import WhiteHorizontalLogo from "public/plane-logos/white-horizontal-with-blue-logo.png";
|
||||
|
||||
type TDefaultLayout = {
|
||||
children: ReactNode;
|
||||
withoutBackground?: boolean;
|
||||
};
|
||||
|
||||
export const DefaultLayout: FC<TDefaultLayout> = (props) => {
|
||||
const { children, withoutBackground = false } = props;
|
||||
// hooks
|
||||
const { resolvedTheme } = useTheme();
|
||||
const patternBackground = resolvedTheme === "dark" ? PlaneBackgroundPatternDark : PlaneBackgroundPattern;
|
||||
|
||||
const logo = resolvedTheme === "light" ? BlackHorizontalLogo : WhiteHorizontalLogo;
|
||||
|
||||
return (
|
||||
<div className="relative">
|
||||
<div className="h-screen w-full overflow-hidden overflow-y-auto flex flex-col">
|
||||
<div className="container h-[110px] flex-shrink-0 mx-auto px-5 lg:px-0 flex items-center justify-between gap-5 z-50">
|
||||
<div className="flex items-center gap-x-2 py-10">
|
||||
<Link href={`/`} className="h-[30px] w-[133px]">
|
||||
<Image src={logo} alt="Plane logo" />
|
||||
</Link>
|
||||
</div>
|
||||
</div>
|
||||
{!withoutBackground && (
|
||||
<div className="absolute inset-0 z-0">
|
||||
<Image src={patternBackground} className="w-screen h-full object-cover" alt="Plane background pattern" />
|
||||
</div>
|
||||
)}
|
||||
<div className="relative z-10 flex-grow">{children}</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -1,55 +0,0 @@
|
||||
import { FC, ReactNode } from "react";
|
||||
import { observer } from "mobx-react";
|
||||
import useSWR from "swr";
|
||||
// components
|
||||
import { LogoSpinner } from "@/components/common";
|
||||
import { InstanceSetupForm, InstanceFailureView } from "@/components/instance";
|
||||
// hooks
|
||||
import { useInstance } from "@/hooks/store";
|
||||
// layout
|
||||
import { DefaultLayout } from "@/layouts/default-layout";
|
||||
|
||||
type InstanceProviderProps = {
|
||||
children: ReactNode;
|
||||
};
|
||||
|
||||
export const InstanceProvider: FC<InstanceProviderProps> = observer((props) => {
|
||||
const { children } = props;
|
||||
// store hooks
|
||||
const { instance, error, fetchInstanceInfo } = useInstance();
|
||||
// fetching instance details
|
||||
useSWR("INSTANCE_DETAILS", () => fetchInstanceInfo(), {
|
||||
revalidateOnFocus: false,
|
||||
revalidateIfStale: false,
|
||||
errorRetryCount: 0,
|
||||
});
|
||||
|
||||
if (!instance && !error)
|
||||
return (
|
||||
<div className="flex h-screen min-h-[500px] w-full justify-center items-center">
|
||||
<LogoSpinner />
|
||||
</div>
|
||||
);
|
||||
|
||||
if (error) {
|
||||
return (
|
||||
<DefaultLayout>
|
||||
<div className="relative h-full w-full overflow-y-auto px-6 py-10 mx-auto flex justify-center items-center">
|
||||
<InstanceFailureView />
|
||||
</div>
|
||||
</DefaultLayout>
|
||||
);
|
||||
}
|
||||
|
||||
if (!instance?.is_setup_done) {
|
||||
return (
|
||||
<DefaultLayout>
|
||||
<div className="relative h-full w-full overflow-y-auto px-6 py-10 mx-auto flex justify-center items-center">
|
||||
<InstanceSetupForm />
|
||||
</div>
|
||||
</DefaultLayout>
|
||||
);
|
||||
}
|
||||
|
||||
return <>{children}</>;
|
||||
});
|
||||
@@ -1,31 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { FC, ReactNode, useEffect } from "react";
|
||||
import { observer } from "mobx-react";
|
||||
import useSWR from "swr";
|
||||
// hooks
|
||||
import { useInstance, useTheme, useUser } from "@/hooks/store";
|
||||
|
||||
interface IUserProvider {
|
||||
children: ReactNode;
|
||||
}
|
||||
|
||||
export const UserProvider: FC<IUserProvider> = observer(({ children }) => {
|
||||
// hooks
|
||||
const { isSidebarCollapsed, toggleSidebar } = useTheme();
|
||||
const { currentUser, fetchCurrentUser } = useUser();
|
||||
const { fetchInstanceAdmins } = useInstance();
|
||||
|
||||
useSWR("CURRENT_USER", () => fetchCurrentUser(), {
|
||||
shouldRetryOnError: false,
|
||||
});
|
||||
useSWR("INSTANCE_ADMINS", () => fetchInstanceAdmins());
|
||||
|
||||
useEffect(() => {
|
||||
const localValue = localStorage && localStorage.getItem("god_mode_sidebar_collapsed");
|
||||
const localBoolValue = localValue ? (localValue === "true" ? true : false) : false;
|
||||
if (isSidebarCollapsed === undefined && localBoolValue != isSidebarCollapsed) toggleSidebar(localBoolValue);
|
||||
}, [isSidebarCollapsed, currentUser, toggleSidebar]);
|
||||
|
||||
return <>{children}</>;
|
||||
});
|
||||
@@ -1,190 +0,0 @@
|
||||
import set from "lodash/set";
|
||||
import { observable, action, computed, makeObservable, runInAction } from "mobx";
|
||||
// plane internal packages
|
||||
import { EInstanceStatus, TInstanceStatus } from "@plane/constants";
|
||||
import { InstanceService } from "@plane/services";
|
||||
import {
|
||||
IInstance,
|
||||
IInstanceAdmin,
|
||||
IInstanceConfiguration,
|
||||
IFormattedInstanceConfiguration,
|
||||
IInstanceInfo,
|
||||
IInstanceConfig,
|
||||
} from "@plane/types";
|
||||
// root store
|
||||
import { CoreRootStore } from "@/store/root.store";
|
||||
|
||||
export interface IInstanceStore {
|
||||
// issues
|
||||
isLoading: boolean;
|
||||
error: any;
|
||||
instanceStatus: TInstanceStatus | undefined;
|
||||
instance: IInstance | undefined;
|
||||
config: IInstanceConfig | undefined;
|
||||
instanceAdmins: IInstanceAdmin[] | undefined;
|
||||
instanceConfigurations: IInstanceConfiguration[] | undefined;
|
||||
// computed
|
||||
formattedConfig: IFormattedInstanceConfiguration | undefined;
|
||||
// action
|
||||
hydrate: (data: IInstanceInfo) => void;
|
||||
fetchInstanceInfo: () => Promise<IInstanceInfo | undefined>;
|
||||
updateInstanceInfo: (data: Partial<IInstance>) => Promise<IInstance | undefined>;
|
||||
fetchInstanceAdmins: () => Promise<IInstanceAdmin[] | undefined>;
|
||||
fetchInstanceConfigurations: () => Promise<IInstanceConfiguration[] | undefined>;
|
||||
updateInstanceConfigurations: (data: Partial<IFormattedInstanceConfiguration>) => Promise<IInstanceConfiguration[]>;
|
||||
}
|
||||
|
||||
export class InstanceStore implements IInstanceStore {
|
||||
isLoading: boolean = true;
|
||||
error: any = undefined;
|
||||
instanceStatus: TInstanceStatus | undefined = undefined;
|
||||
instance: IInstance | undefined = undefined;
|
||||
config: IInstanceConfig | undefined = undefined;
|
||||
instanceAdmins: IInstanceAdmin[] | undefined = undefined;
|
||||
instanceConfigurations: IInstanceConfiguration[] | undefined = undefined;
|
||||
// service
|
||||
instanceService;
|
||||
|
||||
constructor(private store: CoreRootStore) {
|
||||
makeObservable(this, {
|
||||
// observable
|
||||
isLoading: observable.ref,
|
||||
error: observable.ref,
|
||||
instanceStatus: observable,
|
||||
instance: observable,
|
||||
instanceAdmins: observable,
|
||||
instanceConfigurations: observable,
|
||||
// computed
|
||||
formattedConfig: computed,
|
||||
// actions
|
||||
hydrate: action,
|
||||
fetchInstanceInfo: action,
|
||||
fetchInstanceAdmins: action,
|
||||
updateInstanceInfo: action,
|
||||
fetchInstanceConfigurations: action,
|
||||
updateInstanceConfigurations: action,
|
||||
});
|
||||
|
||||
this.instanceService = new InstanceService();
|
||||
}
|
||||
|
||||
hydrate = (data: IInstanceInfo) => {
|
||||
if (data) {
|
||||
this.instance = data.instance;
|
||||
this.config = data.config;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* computed value for instance configurations data for forms.
|
||||
* @returns configurations in the form of {key, value} pair.
|
||||
*/
|
||||
get formattedConfig() {
|
||||
if (!this.instanceConfigurations) return undefined;
|
||||
return this.instanceConfigurations?.reduce((formData: IFormattedInstanceConfiguration, config) => {
|
||||
formData[config.key] = config.value;
|
||||
return formData;
|
||||
}, {} as IFormattedInstanceConfiguration);
|
||||
}
|
||||
|
||||
/**
|
||||
* @description fetching instance configuration
|
||||
* @returns {IInstance} instance
|
||||
*/
|
||||
fetchInstanceInfo = async () => {
|
||||
try {
|
||||
if (this.instance === undefined) this.isLoading = true;
|
||||
this.error = undefined;
|
||||
const instanceInfo = await this.instanceService.info();
|
||||
// handling the new user popup toggle
|
||||
if (this.instance === undefined && !instanceInfo?.instance?.workspaces_exist)
|
||||
this.store.theme.toggleNewUserPopup();
|
||||
runInAction(() => {
|
||||
console.log("instanceInfo: ", instanceInfo);
|
||||
this.isLoading = false;
|
||||
this.instance = instanceInfo.instance;
|
||||
this.config = instanceInfo.config;
|
||||
});
|
||||
return instanceInfo;
|
||||
} catch (error) {
|
||||
console.error("Error fetching the instance info");
|
||||
this.isLoading = false;
|
||||
this.error = { message: "Failed to fetch the instance info" };
|
||||
this.instanceStatus = {
|
||||
status: EInstanceStatus.ERROR,
|
||||
};
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @description updating instance information
|
||||
* @param {Partial<IInstance>} data
|
||||
* @returns void
|
||||
*/
|
||||
updateInstanceInfo = async (data: Partial<IInstance>) => {
|
||||
try {
|
||||
const instanceResponse = await this.instanceService.update(data);
|
||||
if (instanceResponse) {
|
||||
runInAction(() => {
|
||||
if (this.instance) set(this.instance, "instance", instanceResponse);
|
||||
});
|
||||
}
|
||||
return instanceResponse;
|
||||
} catch (error) {
|
||||
console.error("Error updating the instance info");
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @description fetching instance admins
|
||||
* @return {IInstanceAdmin[]} instanceAdmins
|
||||
*/
|
||||
fetchInstanceAdmins = async () => {
|
||||
try {
|
||||
const instanceAdmins = await this.instanceService.admins();
|
||||
if (instanceAdmins) runInAction(() => (this.instanceAdmins = instanceAdmins));
|
||||
return instanceAdmins;
|
||||
} catch (error) {
|
||||
console.error("Error fetching the instance admins");
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @description fetching instance configurations
|
||||
* @return {IInstanceAdmin[]} instanceConfigurations
|
||||
*/
|
||||
fetchInstanceConfigurations = async () => {
|
||||
try {
|
||||
const instanceConfigurations = await this.instanceService.configurations();
|
||||
if (instanceConfigurations) runInAction(() => (this.instanceConfigurations = instanceConfigurations));
|
||||
return instanceConfigurations;
|
||||
} catch (error) {
|
||||
console.error("Error fetching the instance configurations");
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @description updating instance configurations
|
||||
* @param data
|
||||
*/
|
||||
updateInstanceConfigurations = async (data: Partial<IFormattedInstanceConfiguration>) => {
|
||||
try {
|
||||
const response = await this.instanceService.updateConfigurations(data);
|
||||
runInAction(() => {
|
||||
this.instanceConfigurations = this.instanceConfigurations?.map((config) => {
|
||||
const item = response.find((item) => item.key === config.key);
|
||||
if (item) return item;
|
||||
return config;
|
||||
});
|
||||
});
|
||||
return response;
|
||||
} catch (error) {
|
||||
console.error("Error updating the instance configurations");
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -1,27 +0,0 @@
|
||||
/** @type {import('next').NextConfig} */
|
||||
|
||||
const nextConfig = {
|
||||
trailingSlash: true,
|
||||
reactStrictMode: false,
|
||||
swcMinify: true,
|
||||
output: "standalone",
|
||||
images: {
|
||||
unoptimized: true,
|
||||
},
|
||||
basePath: process.env.NEXT_PUBLIC_ADMIN_BASE_PATH || "",
|
||||
transpilePackages: [
|
||||
"@plane/constants",
|
||||
"@plane/editor",
|
||||
"@plane/hooks",
|
||||
"@plane/i18n",
|
||||
"@plane/logger",
|
||||
"@plane/propel",
|
||||
"@plane/services",
|
||||
"@plane/shared-state",
|
||||
"@plane/types",
|
||||
"@plane/ui",
|
||||
"@plane/utils",
|
||||
],
|
||||
};
|
||||
|
||||
module.exports = nextConfig;
|
||||
@@ -1,55 +0,0 @@
|
||||
{
|
||||
"name": "admin",
|
||||
"description": "Admin UI for Plane",
|
||||
"version": "0.26.1",
|
||||
"license": "AGPL-3.0",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"dev": "turbo run develop",
|
||||
"develop": "next dev --port 3001",
|
||||
"build": "next build",
|
||||
"preview": "next build && next start",
|
||||
"start": "next start",
|
||||
"format": "prettier --write .",
|
||||
"lint": "eslint . --ext .ts,.tsx",
|
||||
"lint:errors": "eslint . --ext .ts,.tsx --quiet"
|
||||
},
|
||||
"dependencies": {
|
||||
"@headlessui/react": "^1.7.19",
|
||||
"@plane/constants": "*",
|
||||
"@plane/hooks": "*",
|
||||
"@plane/propel": "*",
|
||||
"@plane/services": "*",
|
||||
"@plane/types": "*",
|
||||
"@plane/ui": "*",
|
||||
"@plane/utils": "*",
|
||||
"@tailwindcss/typography": "^0.5.9",
|
||||
"@types/lodash": "^4.17.0",
|
||||
"autoprefixer": "10.4.14",
|
||||
"axios": "^1.8.3",
|
||||
"lodash": "^4.17.21",
|
||||
"lucide-react": "^0.469.0",
|
||||
"mobx": "^6.12.0",
|
||||
"mobx-react": "^9.1.1",
|
||||
"next": "^14.2.29",
|
||||
"next-themes": "^0.2.1",
|
||||
"postcss": "^8.4.38",
|
||||
"react": "^18.3.1",
|
||||
"react-dom": "^18.3.1",
|
||||
"react-hook-form": "7.51.5",
|
||||
"swr": "^2.2.4",
|
||||
"uuid": "^9.0.1",
|
||||
"zxcvbn": "^4.4.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@plane/eslint-config": "*",
|
||||
"@plane/tailwind-config": "*",
|
||||
"@plane/typescript-config": "*",
|
||||
"@types/node": "18.16.1",
|
||||
"@types/react": "^18.3.11",
|
||||
"@types/react-dom": "^18.2.18",
|
||||
"@types/uuid": "^9.0.8",
|
||||
"@types/zxcvbn": "^4.4.4",
|
||||
"typescript": "5.8.3"
|
||||
}
|
||||
}
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 2.2 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 3.7 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 2.1 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 1.6 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 1.6 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 919 B |
@@ -1,11 +0,0 @@
|
||||
{
|
||||
"name": "",
|
||||
"short_name": "",
|
||||
"icons": [
|
||||
{ "src": "/android-chrome-192x192.png", "sizes": "192x192", "type": "image/png" },
|
||||
{ "src": "/android-chrome-512x512.png", "sizes": "512x512", "type": "image/png" }
|
||||
],
|
||||
"theme_color": "#ffffff",
|
||||
"background_color": "#ffffff",
|
||||
"display": "standalone"
|
||||
}
|
||||
@@ -1,439 +0,0 @@
|
||||
@import "@plane/propel/styles/fonts";
|
||||
|
||||
@tailwind base;
|
||||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
|
||||
@layer components {
|
||||
.text-1\.5xl {
|
||||
font-size: 1.375rem;
|
||||
line-height: 1.875rem;
|
||||
}
|
||||
|
||||
.text-2\.5xl {
|
||||
font-size: 1.75rem;
|
||||
line-height: 2.25rem;
|
||||
}
|
||||
}
|
||||
|
||||
@layer base {
|
||||
html {
|
||||
font-family: "Inter", sans-serif;
|
||||
}
|
||||
|
||||
:root {
|
||||
color-scheme: light !important;
|
||||
|
||||
--color-primary-10: 236, 241, 255;
|
||||
--color-primary-20: 217, 228, 255;
|
||||
--color-primary-30: 197, 214, 255;
|
||||
--color-primary-40: 178, 200, 255;
|
||||
--color-primary-50: 159, 187, 255;
|
||||
--color-primary-60: 140, 173, 255;
|
||||
--color-primary-70: 121, 159, 255;
|
||||
--color-primary-80: 101, 145, 255;
|
||||
--color-primary-90: 82, 132, 255;
|
||||
--color-primary-100: 63, 118, 255;
|
||||
--color-primary-200: 57, 106, 230;
|
||||
--color-primary-300: 50, 94, 204;
|
||||
--color-primary-400: 44, 83, 179;
|
||||
--color-primary-500: 38, 71, 153;
|
||||
--color-primary-600: 32, 59, 128;
|
||||
--color-primary-700: 25, 47, 102;
|
||||
--color-primary-800: 19, 35, 76;
|
||||
--color-primary-900: 13, 24, 51;
|
||||
|
||||
--color-background-100: 255, 255, 255; /* primary bg */
|
||||
--color-background-90: 247, 247, 247; /* secondary bg */
|
||||
--color-background-80: 232, 232, 232; /* tertiary bg */
|
||||
|
||||
--color-text-100: 23, 23, 23; /* primary text */
|
||||
--color-text-200: 58, 58, 58; /* secondary text */
|
||||
--color-text-300: 82, 82, 82; /* tertiary text */
|
||||
--color-text-400: 163, 163, 163; /* placeholder text */
|
||||
|
||||
--color-scrollbar: 163, 163, 163; /* scrollbar thumb */
|
||||
|
||||
--color-border-100: 245, 245, 245; /* subtle border= 1 */
|
||||
--color-border-200: 229, 229, 229; /* subtle border- 2 */
|
||||
--color-border-300: 212, 212, 212; /* strong border- 1 */
|
||||
--color-border-400: 185, 185, 185; /* strong border- 2 */
|
||||
|
||||
--color-shadow-2xs:
|
||||
0px 0px 1px 0px rgba(23, 23, 23, 0.06), 0px 1px 2px 0px rgba(23, 23, 23, 0.06),
|
||||
0px 1px 2px 0px rgba(23, 23, 23, 0.14);
|
||||
--color-shadow-xs:
|
||||
0px 1px 2px 0px rgba(0, 0, 0, 0.16), 0px 2px 4px 0px rgba(16, 24, 40, 0.12),
|
||||
0px 1px 8px -1px rgba(16, 24, 40, 0.1);
|
||||
--color-shadow-sm:
|
||||
0px 1px 4px 0px rgba(0, 0, 0, 0.01), 0px 4px 8px 0px rgba(0, 0, 0, 0.02), 0px 1px 12px 0px rgba(0, 0, 0, 0.12);
|
||||
--color-shadow-rg:
|
||||
0px 3px 6px 0px rgba(0, 0, 0, 0.1), 0px 4px 4px 0px rgba(16, 24, 40, 0.08),
|
||||
0px 1px 12px 0px rgba(16, 24, 40, 0.04);
|
||||
--color-shadow-md:
|
||||
0px 4px 8px 0px rgba(0, 0, 0, 0.12), 0px 6px 12px 0px rgba(16, 24, 40, 0.12),
|
||||
0px 1px 16px 0px rgba(16, 24, 40, 0.12);
|
||||
--color-shadow-lg:
|
||||
0px 6px 12px 0px rgba(0, 0, 0, 0.12), 0px 8px 16px 0px rgba(0, 0, 0, 0.12),
|
||||
0px 1px 24px 0px rgba(16, 24, 40, 0.12);
|
||||
--color-shadow-xl:
|
||||
0px 0px 18px 0px rgba(0, 0, 0, 0.16), 0px 0px 24px 0px rgba(16, 24, 40, 0.16),
|
||||
0px 0px 52px 0px rgba(16, 24, 40, 0.16);
|
||||
--color-shadow-2xl:
|
||||
0px 8px 16px 0px rgba(0, 0, 0, 0.12), 0px 12px 24px 0px rgba(16, 24, 40, 0.12),
|
||||
0px 1px 32px 0px rgba(16, 24, 40, 0.12);
|
||||
--color-shadow-3xl:
|
||||
0px 12px 24px 0px rgba(0, 0, 0, 0.12), 0px 16px 32px 0px rgba(0, 0, 0, 0.12),
|
||||
0px 1px 48px 0px rgba(16, 24, 40, 0.12);
|
||||
--color-shadow-4xl: 0px 8px 40px 0px rgba(0, 0, 61, 0.05), 0px 12px 32px -16px rgba(0, 0, 0, 0.05);
|
||||
|
||||
--color-sidebar-background-100: var(--color-background-100); /* primary sidebar bg */
|
||||
--color-sidebar-background-90: var(--color-background-90); /* secondary sidebar bg */
|
||||
--color-sidebar-background-80: var(--color-background-80); /* tertiary sidebar bg */
|
||||
|
||||
--color-sidebar-text-100: var(--color-text-100); /* primary sidebar text */
|
||||
--color-sidebar-text-200: var(--color-text-200); /* secondary sidebar text */
|
||||
--color-sidebar-text-300: var(--color-text-300); /* tertiary sidebar text */
|
||||
--color-sidebar-text-400: var(--color-text-400); /* sidebar placeholder text */
|
||||
|
||||
--color-sidebar-border-100: var(--color-border-100); /* subtle sidebar border= 1 */
|
||||
--color-sidebar-border-200: var(--color-border-100); /* subtle sidebar border- 2 */
|
||||
--color-sidebar-border-300: var(--color-border-100); /* strong sidebar border- 1 */
|
||||
--color-sidebar-border-400: var(--color-border-100); /* strong sidebar border- 2 */
|
||||
|
||||
--color-sidebar-shadow-2xs: var(--color-shadow-2xs);
|
||||
--color-sidebar-shadow-xs: var(--color-shadow-xs);
|
||||
--color-sidebar-shadow-sm: var(--color-shadow-sm);
|
||||
--color-sidebar-shadow-rg: var(--color-shadow-rg);
|
||||
--color-sidebar-shadow-md: var(--color-shadow-md);
|
||||
--color-sidebar-shadow-lg: var(--color-shadow-lg);
|
||||
--color-sidebar-shadow-xl: var(--color-shadow-xl);
|
||||
--color-sidebar-shadow-2xl: var(--color-shadow-2xl);
|
||||
--color-sidebar-shadow-3xl: var(--color-shadow-3xl);
|
||||
--color-sidebar-shadow-4xl: var(--color-shadow-4xl);
|
||||
}
|
||||
|
||||
[data-theme="light"],
|
||||
[data-theme="light-contrast"] {
|
||||
color-scheme: light !important;
|
||||
|
||||
--color-background-100: 255, 255, 255; /* primary bg */
|
||||
--color-background-90: 247, 247, 247; /* secondary bg */
|
||||
--color-background-80: 232, 232, 232; /* tertiary bg */
|
||||
}
|
||||
|
||||
[data-theme="light"] {
|
||||
--color-text-100: 23, 23, 23; /* primary text */
|
||||
--color-text-200: 58, 58, 58; /* secondary text */
|
||||
--color-text-300: 82, 82, 82; /* tertiary text */
|
||||
--color-text-400: 163, 163, 163; /* placeholder text */
|
||||
|
||||
--color-scrollbar: 163, 163, 163; /* scrollbar thumb */
|
||||
|
||||
--color-border-100: 245, 245, 245; /* subtle border= 1 */
|
||||
--color-border-200: 229, 229, 229; /* subtle border- 2 */
|
||||
--color-border-300: 212, 212, 212; /* strong border- 1 */
|
||||
--color-border-400: 185, 185, 185; /* strong border- 2 */
|
||||
|
||||
/* onboarding colors */
|
||||
--gradient-onboarding-100: linear-gradient(106deg, #f2f6ff 29.8%, #e1eaff 99.34%);
|
||||
--gradient-onboarding-200: linear-gradient(129deg, rgba(255, 255, 255, 0) -22.23%, rgba(255, 255, 255, 0.8) 62.98%);
|
||||
--gradient-onboarding-300: linear-gradient(164deg, #fff 4.25%, rgba(255, 255, 255, 0.06) 93.5%);
|
||||
--gradient-onboarding-400: linear-gradient(129deg, rgba(255, 255, 255, 0) -22.23%, rgba(255, 255, 255, 0.8) 62.98%);
|
||||
|
||||
--color-onboarding-text-100: 23, 23, 23;
|
||||
--color-onboarding-text-200: 58, 58, 58;
|
||||
--color-onboarding-text-300: 82, 82, 82;
|
||||
--color-onboarding-text-400: 163, 163, 163;
|
||||
|
||||
--color-onboarding-background-100: 236, 241, 255;
|
||||
--color-onboarding-background-200: 255, 255, 255;
|
||||
--color-onboarding-background-300: 236, 241, 255;
|
||||
--color-onboarding-background-400: 177, 206, 250;
|
||||
|
||||
--color-onboarding-border-100: 229, 229, 229;
|
||||
--color-onboarding-border-200: 217, 228, 255;
|
||||
--color-onboarding-border-300: 229, 229, 229, 0.5;
|
||||
|
||||
--color-onboarding-shadow-sm: 0px 4px 20px 0px rgba(126, 139, 171, 0.1);
|
||||
|
||||
/* toast theme */
|
||||
--color-toast-success-text: 62, 155, 79;
|
||||
--color-toast-error-text: 220, 62, 66;
|
||||
--color-toast-warning-text: 255, 186, 24;
|
||||
--color-toast-info-text: 51, 88, 212;
|
||||
--color-toast-loading-text: 28, 32, 36;
|
||||
--color-toast-secondary-text: 128, 131, 141;
|
||||
--color-toast-tertiary-text: 96, 100, 108;
|
||||
|
||||
--color-toast-success-background: 253, 253, 254;
|
||||
--color-toast-error-background: 255, 252, 252;
|
||||
--color-toast-warning-background: 254, 253, 251;
|
||||
--color-toast-info-background: 253, 253, 254;
|
||||
--color-toast-loading-background: 253, 253, 254;
|
||||
|
||||
--color-toast-success-border: 218, 241, 219;
|
||||
--color-toast-error-border: 255, 219, 220;
|
||||
--color-toast-warning-border: 255, 247, 194;
|
||||
--color-toast-info-border: 210, 222, 255;
|
||||
--color-toast-loading-border: 224, 225, 230;
|
||||
}
|
||||
|
||||
[data-theme="light-contrast"] {
|
||||
--color-text-100: 11, 11, 11; /* primary text */
|
||||
--color-text-200: 38, 38, 38; /* secondary text */
|
||||
--color-text-300: 58, 58, 58; /* tertiary text */
|
||||
--color-text-400: 115, 115, 115; /* placeholder text */
|
||||
|
||||
--color-scrollbar: 115, 115, 115; /* scrollbar thumb */
|
||||
|
||||
--color-border-100: 34, 34, 34; /* subtle border= 1 */
|
||||
--color-border-200: 38, 38, 38; /* subtle border- 2 */
|
||||
--color-border-300: 46, 46, 46; /* strong border- 1 */
|
||||
--color-border-400: 58, 58, 58; /* strong border- 2 */
|
||||
}
|
||||
|
||||
[data-theme="dark"],
|
||||
[data-theme="dark-contrast"] {
|
||||
color-scheme: dark !important;
|
||||
|
||||
--color-background-100: 25, 25, 25; /* primary bg */
|
||||
--color-background-90: 32, 32, 32; /* secondary bg */
|
||||
--color-background-80: 44, 44, 44; /* tertiary bg */
|
||||
|
||||
--color-shadow-2xs: 0px 0px 1px 0px rgba(0, 0, 0, 0.15), 0px 1px 3px 0px rgba(0, 0, 0, 0.5);
|
||||
--color-shadow-xs: 0px 0px 2px 0px rgba(0, 0, 0, 0.2), 0px 2px 4px 0px rgba(0, 0, 0, 0.5);
|
||||
--color-shadow-sm: 0px 0px 4px 0px rgba(0, 0, 0, 0.2), 0px 2px 6px 0px rgba(0, 0, 0, 0.5);
|
||||
--color-shadow-rg: 0px 0px 6px 0px rgba(0, 0, 0, 0.2), 0px 4px 6px 0px rgba(0, 0, 0, 0.5);
|
||||
--color-shadow-md: 0px 2px 8px 0px rgba(0, 0, 0, 0.2), 0px 4px 8px 0px rgba(0, 0, 0, 0.5);
|
||||
--color-shadow-lg: 0px 4px 12px 0px rgba(0, 0, 0, 0.25), 0px 4px 10px 0px rgba(0, 0, 0, 0.55);
|
||||
--color-shadow-xl: 0px 0px 14px 0px rgba(0, 0, 0, 0.25), 0px 6px 10px 0px rgba(0, 0, 0, 0.55);
|
||||
--color-shadow-2xl: 0px 0px 18px 0px rgba(0, 0, 0, 0.25), 0px 8px 12px 0px rgba(0, 0, 0, 0.6);
|
||||
--color-shadow-3xl: 0px 4px 24px 0px rgba(0, 0, 0, 0.3), 0px 12px 40px 0px rgba(0, 0, 0, 0.65);
|
||||
}
|
||||
|
||||
[data-theme="dark"] {
|
||||
--color-text-100: 229, 229, 229; /* primary text */
|
||||
--color-text-200: 163, 163, 163; /* secondary text */
|
||||
--color-text-300: 115, 115, 115; /* tertiary text */
|
||||
--color-text-400: 82, 82, 82; /* placeholder text */
|
||||
|
||||
--color-scrollbar: 82, 82, 82; /* scrollbar thumb */
|
||||
|
||||
--color-border-100: 34, 34, 34; /* subtle border= 1 */
|
||||
--color-border-200: 38, 38, 38; /* subtle border- 2 */
|
||||
--color-border-300: 46, 46, 46; /* strong border- 1 */
|
||||
--color-border-400: 58, 58, 58; /* strong border- 2 */
|
||||
|
||||
/* onboarding colors */
|
||||
--gradient-onboarding-100: linear-gradient(106deg, #18191b 25.17%, #18191b 99.34%);
|
||||
--gradient-onboarding-200: linear-gradient(129deg, rgba(47, 49, 53, 0.8) -22.23%, rgba(33, 34, 37, 0.8) 62.98%);
|
||||
--gradient-onboarding-300: linear-gradient(167deg, rgba(47, 49, 53, 0.45) 19.22%, #212225 98.48%);
|
||||
|
||||
--color-onboarding-text-100: 237, 238, 240;
|
||||
--color-onboarding-text-200: 176, 180, 187;
|
||||
--color-onboarding-text-300: 118, 123, 132;
|
||||
--color-onboarding-text-400: 105, 110, 119;
|
||||
|
||||
--color-onboarding-background-100: 54, 58, 64;
|
||||
--color-onboarding-background-200: 40, 42, 45;
|
||||
--color-onboarding-background-300: 40, 42, 45;
|
||||
--color-onboarding-background-400: 67, 72, 79;
|
||||
|
||||
--color-onboarding-border-100: 54, 58, 64;
|
||||
--color-onboarding-border-200: 54, 58, 64;
|
||||
--color-onboarding-border-300: 34, 35, 38, 0.5;
|
||||
|
||||
--color-onboarding-shadow-sm: 0px 4px 20px 0px rgba(39, 44, 56, 0.1);
|
||||
|
||||
/* toast theme */
|
||||
--color-toast-success-text: 178, 221, 181;
|
||||
--color-toast-error-text: 206, 44, 49;
|
||||
--color-toast-warning-text: 255, 186, 24;
|
||||
--color-toast-info-text: 141, 164, 239;
|
||||
--color-toast-loading-text: 255, 255, 255;
|
||||
--color-toast-secondary-text: 185, 187, 198;
|
||||
--color-toast-tertiary-text: 139, 141, 152;
|
||||
|
||||
--color-toast-success-background: 46, 46, 46;
|
||||
--color-toast-error-background: 46, 46, 46;
|
||||
--color-toast-warning-background: 46, 46, 46;
|
||||
--color-toast-info-background: 46, 46, 46;
|
||||
--color-toast-loading-background: 46, 46, 46;
|
||||
|
||||
--color-toast-success-border: 42, 126, 59;
|
||||
--color-toast-error-border: 100, 23, 35;
|
||||
--color-toast-warning-border: 79, 52, 34;
|
||||
--color-toast-info-border: 58, 91, 199;
|
||||
--color-toast-loading-border: 96, 100, 108;
|
||||
}
|
||||
|
||||
[data-theme="dark-contrast"] {
|
||||
--color-text-100: 250, 250, 250; /* primary text */
|
||||
--color-text-200: 241, 241, 241; /* secondary text */
|
||||
--color-text-300: 212, 212, 212; /* tertiary text */
|
||||
--color-text-400: 115, 115, 115; /* placeholder text */
|
||||
|
||||
--color-scrollbar: 115, 115, 115; /* scrollbar thumb */
|
||||
|
||||
--color-border-100: 245, 245, 245; /* subtle border= 1 */
|
||||
--color-border-200: 229, 229, 229; /* subtle border- 2 */
|
||||
--color-border-300: 212, 212, 212; /* strong border- 1 */
|
||||
--color-border-400: 185, 185, 185; /* strong border- 2 */
|
||||
}
|
||||
|
||||
[data-theme="light"],
|
||||
[data-theme="dark"],
|
||||
[data-theme="light-contrast"],
|
||||
[data-theme="dark-contrast"] {
|
||||
--color-primary-10: 236, 241, 255;
|
||||
--color-primary-20: 217, 228, 255;
|
||||
--color-primary-30: 197, 214, 255;
|
||||
--color-primary-40: 178, 200, 255;
|
||||
--color-primary-50: 159, 187, 255;
|
||||
--color-primary-60: 140, 173, 255;
|
||||
--color-primary-70: 121, 159, 255;
|
||||
--color-primary-80: 101, 145, 255;
|
||||
--color-primary-90: 82, 132, 255;
|
||||
--color-primary-100: 63, 118, 255;
|
||||
--color-primary-200: 57, 106, 230;
|
||||
--color-primary-300: 50, 94, 204;
|
||||
--color-primary-400: 44, 83, 179;
|
||||
--color-primary-500: 38, 71, 153;
|
||||
--color-primary-600: 32, 59, 128;
|
||||
--color-primary-700: 25, 47, 102;
|
||||
--color-primary-800: 19, 35, 76;
|
||||
--color-primary-900: 13, 24, 51;
|
||||
|
||||
--color-sidebar-background-100: var(--color-background-100); /* primary sidebar bg */
|
||||
--color-sidebar-background-90: var(--color-background-90); /* secondary sidebar bg */
|
||||
--color-sidebar-background-80: var(--color-background-80); /* tertiary sidebar bg */
|
||||
|
||||
--color-sidebar-text-100: var(--color-text-100); /* primary sidebar text */
|
||||
--color-sidebar-text-200: var(--color-text-200); /* secondary sidebar text */
|
||||
--color-sidebar-text-300: var(--color-text-300); /* tertiary sidebar text */
|
||||
--color-sidebar-text-400: var(--color-text-400); /* sidebar placeholder text */
|
||||
|
||||
--color-sidebar-border-100: var(--color-border-100); /* subtle sidebar border= 1 */
|
||||
--color-sidebar-border-200: var(--color-border-200); /* subtle sidebar border- 2 */
|
||||
--color-sidebar-border-300: var(--color-border-300); /* strong sidebar border- 1 */
|
||||
--color-sidebar-border-400: var(--color-border-400); /* strong sidebar border- 2 */
|
||||
}
|
||||
}
|
||||
|
||||
* {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
box-sizing: border-box;
|
||||
-webkit-text-size-adjust: 100%;
|
||||
-ms-text-size-adjust: 100%;
|
||||
font-variant-ligatures: none;
|
||||
-webkit-font-variant-ligatures: none;
|
||||
text-rendering: optimizeLegibility;
|
||||
-moz-osx-font-smoothing: grayscale;
|
||||
-webkit-font-smoothing: antialiased;
|
||||
}
|
||||
|
||||
body {
|
||||
color: rgba(var(--color-text-100));
|
||||
}
|
||||
|
||||
/* scrollbar style */
|
||||
@-moz-document url-prefix() {
|
||||
* {
|
||||
scrollbar-width: none;
|
||||
}
|
||||
.vertical-scrollbar,
|
||||
.horizontal-scrollbar {
|
||||
scrollbar-width: initial;
|
||||
scrollbar-color: rgba(96, 100, 108, 0.1) transparent;
|
||||
}
|
||||
.vertical-scrollbar:hover,
|
||||
.horizontal-scrollbar:hover {
|
||||
scrollbar-color: rgba(96, 100, 108, 0.25) transparent;
|
||||
}
|
||||
.vertical-scrollbar:active,
|
||||
.horizontal-scrollbar:active {
|
||||
scrollbar-color: rgba(96, 100, 108, 0.7) transparent;
|
||||
}
|
||||
}
|
||||
|
||||
.vertical-scrollbar {
|
||||
overflow-y: auto;
|
||||
}
|
||||
.horizontal-scrollbar {
|
||||
overflow-x: auto;
|
||||
}
|
||||
.vertical-scrollbar::-webkit-scrollbar,
|
||||
.horizontal-scrollbar::-webkit-scrollbar {
|
||||
display: block;
|
||||
}
|
||||
.vertical-scrollbar::-webkit-scrollbar-track,
|
||||
.horizontal-scrollbar::-webkit-scrollbar-track {
|
||||
background-color: transparent;
|
||||
border-radius: 9999px;
|
||||
}
|
||||
.vertical-scrollbar::-webkit-scrollbar-thumb,
|
||||
.horizontal-scrollbar::-webkit-scrollbar-thumb {
|
||||
background-clip: padding-box;
|
||||
background-color: rgba(96, 100, 108, 0.1);
|
||||
border-radius: 9999px;
|
||||
}
|
||||
.vertical-scrollbar:hover::-webkit-scrollbar-thumb,
|
||||
.horizontal-scrollbar:hover::-webkit-scrollbar-thumb {
|
||||
background-color: rgba(96, 100, 108, 0.25);
|
||||
}
|
||||
.vertical-scrollbar::-webkit-scrollbar-thumb:hover,
|
||||
.horizontal-scrollbar::-webkit-scrollbar-thumb:hover {
|
||||
background-color: rgba(96, 100, 108, 0.5);
|
||||
}
|
||||
.vertical-scrollbar::-webkit-scrollbar-thumb:active,
|
||||
.horizontal-scrollbar::-webkit-scrollbar-thumb:active {
|
||||
background-color: rgba(96, 100, 108, 0.7);
|
||||
}
|
||||
.vertical-scrollbar::-webkit-scrollbar-corner,
|
||||
.horizontal-scrollbar::-webkit-scrollbar-corner {
|
||||
background-color: transparent;
|
||||
}
|
||||
.vertical-scrollbar-margin-top-md::-webkit-scrollbar-track {
|
||||
margin-top: 44px;
|
||||
}
|
||||
|
||||
/* scrollbar sm size */
|
||||
.scrollbar-sm::-webkit-scrollbar {
|
||||
height: 12px;
|
||||
width: 12px;
|
||||
}
|
||||
.scrollbar-sm::-webkit-scrollbar-thumb {
|
||||
border: 3px solid rgba(0, 0, 0, 0);
|
||||
}
|
||||
/* scrollbar md size */
|
||||
.scrollbar-md::-webkit-scrollbar {
|
||||
height: 14px;
|
||||
width: 14px;
|
||||
}
|
||||
.scrollbar-md::-webkit-scrollbar-thumb {
|
||||
border: 3px solid rgba(0, 0, 0, 0);
|
||||
}
|
||||
/* scrollbar lg size */
|
||||
|
||||
.scrollbar-lg::-webkit-scrollbar {
|
||||
height: 16px;
|
||||
width: 16px;
|
||||
}
|
||||
.scrollbar-lg::-webkit-scrollbar-thumb {
|
||||
border: 4px solid rgba(0, 0, 0, 0);
|
||||
}
|
||||
/* end scrollbar style */
|
||||
|
||||
/* progress bar */
|
||||
.progress-bar {
|
||||
fill: currentColor;
|
||||
color: rgba(var(--color-sidebar-background-100));
|
||||
}
|
||||
|
||||
::-webkit-input-placeholder {
  color: rgb(var(--color-text-400));
}
::placeholder {
  color: rgb(var(--color-text-400));
}
:-ms-input-placeholder {
  color: rgb(var(--color-text-400));
}
|
||||
@@ -1,20 +0,0 @@
{
  "extends": "@plane/typescript-config/nextjs.json",
  "compilerOptions": {
    "plugins": [
      {
        "name": "next"
      }
    ],
    "baseUrl": ".",
    "paths": {
      "@/*": ["core/*"],
      "@/public/*": ["public/*"],
      "@/plane-admin/*": ["ce/*"],
      "@/styles/*": ["styles/*"]
    },
    "strictNullChecks": true
  },
  "include": ["next-env.d.ts", "next.config.js", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
  "exclude": ["node_modules"]
}
@@ -1,182 +0,0 @@
|
||||
ARG BASE_TAG=develop
|
||||
ARG BUILD_TYPE=full
|
||||
# *****************************************************************************
|
||||
# STAGE 1: Build the project
|
||||
# *****************************************************************************
|
||||
FROM node:18-alpine AS builder
|
||||
RUN apk add --no-cache libc6-compat
|
||||
# Set working directory
|
||||
WORKDIR /app
|
||||
|
||||
RUN yarn global add turbo
|
||||
COPY . .
|
||||
|
||||
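# Note: with the --docker flag, turbo prune writes a pruned workspace to ./out
# (out/json holds the package manifests and lockfile, out/full the sources),
# which the installer stage below copies in.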
RUN turbo prune --scope=web --scope=space --scope=admin --docker
|
||||
|
||||
# *****************************************************************************
|
||||
# STAGE 2: Install dependencies & build the project
|
||||
# *****************************************************************************
|
||||
# Add lockfile and package.json's of isolated subworkspace
|
||||
FROM node:18-alpine AS installer
|
||||
|
||||
RUN apk add --no-cache libc6-compat
|
||||
WORKDIR /app
|
||||
|
||||
# First install the dependencies (as they change less often)
|
||||
COPY .gitignore .gitignore
|
||||
COPY --from=builder /app/out/json/ .
|
||||
COPY --from=builder /app/out/yarn.lock ./yarn.lock
|
||||
RUN yarn install
|
||||
|
||||
# Build the project
|
||||
COPY --from=builder /app/out/full/ .
|
||||
COPY turbo.json turbo.json
|
||||
|
||||
ARG NEXT_PUBLIC_API_BASE_URL=""
|
||||
ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
|
||||
|
||||
ARG NEXT_PUBLIC_ADMIN_BASE_URL=""
|
||||
ENV NEXT_PUBLIC_ADMIN_BASE_URL=$NEXT_PUBLIC_ADMIN_BASE_URL
|
||||
|
||||
ARG NEXT_PUBLIC_ADMIN_BASE_PATH="/god-mode"
|
||||
ENV NEXT_PUBLIC_ADMIN_BASE_PATH=$NEXT_PUBLIC_ADMIN_BASE_PATH
|
||||
|
||||
ARG NEXT_PUBLIC_SPACE_BASE_URL=""
|
||||
ENV NEXT_PUBLIC_SPACE_BASE_URL=$NEXT_PUBLIC_SPACE_BASE_URL
|
||||
|
||||
ARG NEXT_PUBLIC_SPACE_BASE_PATH="/spaces"
|
||||
ENV NEXT_PUBLIC_SPACE_BASE_PATH=$NEXT_PUBLIC_SPACE_BASE_PATH
|
||||
|
||||
ARG NEXT_PUBLIC_WEB_BASE_URL=""
|
||||
ENV NEXT_PUBLIC_WEB_BASE_URL=$NEXT_PUBLIC_WEB_BASE_URL
|
||||
|
||||
ENV NEXT_TELEMETRY_DISABLED=1
|
||||
ENV TURBO_TELEMETRY_DISABLED=1
|
||||
|
||||
RUN yarn turbo run build --filter=web --filter=space --filter=admin
|
||||
|
||||
# *****************************************************************************
|
||||
# STAGE 3: Copy the project and start it
|
||||
# *****************************************************************************
|
||||
FROM makeplane/plane-aio-base:${BUILD_TYPE}-${BASE_TAG} AS runner
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
SHELL [ "/bin/bash", "-c" ]
|
||||
|
||||
# PYTHON APPLICATION SETUP
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE=1
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
|
||||
|
||||
COPY apiserver/requirements.txt ./api/
|
||||
COPY apiserver/requirements ./api/requirements
|
||||
|
||||
RUN pip install -r ./api/requirements.txt --compile --no-cache-dir
|
||||
|
||||
# Add in Django deps and generate Django's static files
|
||||
COPY apiserver/manage.py ./api/manage.py
|
||||
COPY apiserver/plane ./api/plane/
|
||||
COPY apiserver/templates ./api/templates/
|
||||
COPY package.json ./api/package.json
|
||||
|
||||
COPY apiserver/bin ./api/bin/
|
||||
|
||||
RUN chmod +x ./api/bin/*
|
||||
RUN chmod -R 777 ./api/
|
||||
|
||||
# NEXTJS BUILDS
|
||||
COPY --from=installer /app/web/next.config.js ./web/
|
||||
COPY --from=installer /app/web/package.json ./web/
|
||||
COPY --from=installer /app/web/.next/standalone ./web
|
||||
COPY --from=installer /app/web/.next/static ./web/web/.next/static
|
||||
COPY --from=installer /app/web/public ./web/web/public
|
||||
|
||||
COPY --from=installer /app/space/next.config.js ./space/
|
||||
COPY --from=installer /app/space/package.json ./space/
|
||||
COPY --from=installer /app/space/.next/standalone ./space
|
||||
COPY --from=installer /app/space/.next/static ./space/space/.next/static
|
||||
COPY --from=installer /app/space/public ./space/space/public
|
||||
|
||||
COPY --from=installer /app/admin/next.config.js ./admin/
|
||||
COPY --from=installer /app/admin/package.json ./admin/
|
||||
COPY --from=installer /app/admin/.next/standalone ./admin
|
||||
COPY --from=installer /app/admin/.next/static ./admin/admin/.next/static
|
||||
COPY --from=installer /app/admin/public ./admin/admin/public
|
||||
|
||||
ARG NEXT_PUBLIC_API_BASE_URL=""
|
||||
ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
|
||||
|
||||
ARG NEXT_PUBLIC_ADMIN_BASE_URL=""
|
||||
ENV NEXT_PUBLIC_ADMIN_BASE_URL=$NEXT_PUBLIC_ADMIN_BASE_URL
|
||||
|
||||
ARG NEXT_PUBLIC_ADMIN_BASE_PATH="/god-mode"
|
||||
ENV NEXT_PUBLIC_ADMIN_BASE_PATH=$NEXT_PUBLIC_ADMIN_BASE_PATH
|
||||
|
||||
ARG NEXT_PUBLIC_SPACE_BASE_URL=""
|
||||
ENV NEXT_PUBLIC_SPACE_BASE_URL=$NEXT_PUBLIC_SPACE_BASE_URL
|
||||
|
||||
ARG NEXT_PUBLIC_SPACE_BASE_PATH="/spaces"
|
||||
ENV NEXT_PUBLIC_SPACE_BASE_PATH=$NEXT_PUBLIC_SPACE_BASE_PATH
|
||||
|
||||
ARG NEXT_PUBLIC_WEB_BASE_URL=""
|
||||
ENV NEXT_PUBLIC_WEB_BASE_URL=$NEXT_PUBLIC_WEB_BASE_URL
|
||||
|
||||
ENV NEXT_TELEMETRY_DISABLED=1
|
||||
ENV TURBO_TELEMETRY_DISABLED=1
|
||||
|
||||
ARG BUILD_TYPE=full
|
||||
ENV BUILD_TYPE=$BUILD_TYPE
|
||||
|
||||
COPY aio/supervisord-${BUILD_TYPE}-base /app/supervisord.conf
|
||||
COPY aio/supervisord-app /app/supervisord-app
|
||||
RUN cat /app/supervisord-app >> /app/supervisord.conf && \
|
||||
rm /app/supervisord-app
|
||||
|
||||
COPY ./aio/nginx.conf /etc/nginx/nginx.conf.template
|
||||
|
||||
# If the build type is full, copy the PostgreSQL config and the pg-setup.sh script below
|
||||
COPY aio/postgresql.conf /etc/postgresql/postgresql.conf
|
||||
COPY aio/pg-setup.sh /app/pg-setup.sh
|
||||
RUN chmod +x /app/pg-setup.sh
|
||||
|
||||
# *****************************************************************************
|
||||
# APPLICATION ENVIRONMENT SETTINGS
|
||||
# *****************************************************************************
|
||||
ENV APP_DOMAIN=localhost
|
||||
ENV WEB_URL=http://${APP_DOMAIN}
|
||||
ENV DEBUG=0
|
||||
ENV CORS_ALLOWED_ORIGINS=http://${APP_DOMAIN},https://${APP_DOMAIN}
|
||||
# Secret Key
|
||||
ENV SECRET_KEY=60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5
|
||||
# Gunicorn Workers
|
||||
ENV GUNICORN_WORKERS=1
|
||||
|
||||
ENV POSTGRES_USER="plane"
|
||||
ENV POSTGRES_PASSWORD="plane"
|
||||
ENV POSTGRES_DB="plane"
|
||||
ENV POSTGRES_HOST="localhost"
|
||||
ENV POSTGRES_PORT="5432"
|
||||
ENV DATABASE_URL="postgresql://plane:plane@localhost:5432/plane"
|
||||
|
||||
ENV REDIS_HOST="localhost"
|
||||
ENV REDIS_PORT="6379"
|
||||
ENV REDIS_URL="redis://localhost:6379"
|
||||
|
||||
ENV USE_MINIO="1"
|
||||
ENV AWS_REGION=""
|
||||
ENV AWS_ACCESS_KEY_ID="access-key"
|
||||
ENV AWS_SECRET_ACCESS_KEY="secret-key"
|
||||
ENV AWS_S3_ENDPOINT_URL="http://localhost:9000"
|
||||
ENV AWS_S3_BUCKET_NAME="uploads"
|
||||
ENV MINIO_ROOT_USER="access-key"
|
||||
ENV MINIO_ROOT_PASSWORD="secret-key"
|
||||
ENV BUCKET_NAME="uploads"
|
||||
ENV FILE_SIZE_LIMIT="5242880"
|
||||
|
||||
# *****************************************************************************
|
||||
|
||||
RUN /app/pg-setup.sh
|
||||
|
||||
CMD ["/usr/bin/supervisord", "-c", "/app/supervisord.conf"]
|
||||
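For reference, a hypothetical invocation of the Dockerfile above; the file path, image tag, and the chosen build-arg values are assumptions, not taken from the repository's CI:

  docker build \
    -f aio/Dockerfile \
    --build-arg BUILD_TYPE=full \
    --build-arg BASE_TAG=develop \
    --build-arg NEXT_PUBLIC_API_BASE_URL="" \
    -t plane-aio:local .

BUILD_TYPE and BASE_TAG together select the runner base image, makeplane/plane-aio-base:full-develop in this sketch.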
@@ -1,73 +0,0 @@
|
||||
FROM --platform=$BUILDPLATFORM tonistiigi/binfmt AS binfmt
|
||||
|
||||
FROM python:3.12-slim
|
||||
|
||||
# Set environment variables to non-interactive for apt
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
ENV BUILD_TYPE=full
|
||||
|
||||
SHELL [ "/bin/bash", "-c" ]
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN mkdir -p /app/{data,logs} && \
|
||||
mkdir -p /app/data/{redis,pg,minio,nginx} && \
|
||||
mkdir -p /app/logs/{access,error} && \
|
||||
mkdir -p /etc/supervisor/conf.d
|
||||
|
||||
# Update the package list and install prerequisites
|
||||
RUN apt-get update && \
|
||||
apt-get install -y \
|
||||
gnupg2 curl ca-certificates lsb-release software-properties-common \
|
||||
build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev \
|
||||
libsqlite3-dev wget llvm libncurses5-dev libncursesw5-dev xz-utils \
|
||||
tk-dev libffi-dev liblzma-dev supervisor nginx nano vim ncdu \
|
||||
sudo lsof net-tools libpq-dev procps gettext
|
||||
|
||||
# Install Redis 7.2
|
||||
RUN echo "deb http://deb.debian.org/debian $(lsb_release -cs)-backports main" > /etc/apt/sources.list.d/backports.list && \
|
||||
curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg && \
|
||||
echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" > /etc/apt/sources.list.d/redis.list && \
|
||||
apt-get update && \
|
||||
apt-get install -y redis-server
|
||||
|
||||
# Install PostgreSQL 15
|
||||
ENV POSTGRES_VERSION=15
|
||||
RUN curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor -o /usr/share/keyrings/pgdg-archive-keyring.gpg && \
|
||||
echo "deb [signed-by=/usr/share/keyrings/pgdg-archive-keyring.gpg] http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
|
||||
apt-get update && \
|
||||
apt-get install -y postgresql-$POSTGRES_VERSION postgresql-client-$POSTGRES_VERSION && \
|
||||
mkdir -p /var/lib/postgresql/data && \
|
||||
chown -R postgres:postgres /var/lib/postgresql
|
||||
COPY postgresql.conf /etc/postgresql/postgresql.conf
|
||||
RUN sudo -u postgres /usr/lib/postgresql/$POSTGRES_VERSION/bin/initdb -D /var/lib/postgresql/data
|
||||
|
||||
# Install MinIO
|
||||
ARG TARGETARCH
|
||||
RUN if [ "$TARGETARCH" = "amd64" ]; then \
|
||||
curl -fSl https://dl.min.io/server/minio/release/linux-amd64/minio -o /usr/local/bin/minio; \
|
||||
elif [ "$TARGETARCH" = "arm64" ]; then \
|
||||
curl -fSl https://dl.min.io/server/minio/release/linux-arm64/minio -o /usr/local/bin/minio; \
|
||||
else \
|
||||
echo "Unsupported architecture: $TARGETARCH"; exit 1; \
|
||||
fi && \
|
||||
chmod +x /usr/local/bin/minio
|
||||
|
||||
# Install Node.js 18
|
||||
RUN curl -fsSL https://deb.nodesource.com/setup_18.x | bash - && \
|
||||
apt-get install -y nodejs && \
|
||||
python -m pip install --upgrade pip && \
|
||||
apt-get clean && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Create Supervisor configuration file
|
||||
COPY supervisord-full-base /app/supervisord.conf
|
||||
COPY nginx.conf /etc/nginx/nginx.conf.template
|
||||
COPY env.sh /app/nginx-start.sh
|
||||
RUN chmod +x /app/nginx-start.sh
|
||||
|
||||
# Expose ports for Redis, PostgreSQL, MinIO, and Nginx
|
||||
EXPOSE 6379 5432 9000 80 443
|
||||
|
||||
# Start Supervisor
|
||||
CMD ["/usr/bin/supervisord", "-c", "/app/supervisord.conf"]
|
||||
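The MinIO download above branches on TARGETARCH, which BuildKit fills in automatically for each target platform. A hypothetical multi-arch build of this base image (image name and platform list are assumptions) would look like:

  docker buildx build --platform linux/amd64,linux/arm64 -t makeplane/plane-aio-base:full-develop .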
@@ -1,45 +0,0 @@
|
||||
FROM --platform=$BUILDPLATFORM tonistiigi/binfmt AS binfmt
|
||||
|
||||
FROM python:3.12-slim
|
||||
|
||||
# Set environment variables to non-interactive for apt
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
ENV BUILD_TYPE=slim
|
||||
|
||||
SHELL [ "/bin/bash", "-c" ]
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN mkdir -p /app/{data,logs} && \
|
||||
mkdir -p /app/data/nginx && \
|
||||
mkdir -p /app/logs/{access,error} && \
|
||||
mkdir -p /etc/supervisor/conf.d
|
||||
|
||||
# Update the package list and install prerequisites
|
||||
RUN apt-get update && \
|
||||
apt-get install -y \
|
||||
gnupg2 curl ca-certificates lsb-release software-properties-common \
|
||||
build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev \
|
||||
libsqlite3-dev wget llvm libncurses5-dev libncursesw5-dev xz-utils \
|
||||
tk-dev libffi-dev liblzma-dev supervisor nginx nano vim ncdu \
|
||||
sudo lsof net-tools libpq-dev procps gettext
|
||||
|
||||
# Install Node.js 18
|
||||
RUN curl -fsSL https://deb.nodesource.com/setup_18.x | bash - && \
|
||||
apt-get install -y nodejs
|
||||
|
||||
RUN python -m pip install --upgrade pip && \
|
||||
apt-get clean && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Create Supervisor configuration file
|
||||
COPY supervisord-slim-base /app/supervisord.conf
|
||||
COPY nginx.conf /etc/nginx/nginx.conf.template
|
||||
COPY env.sh /app/nginx-start.sh
|
||||
RUN chmod +x /app/nginx-start.sh
|
||||
|
||||
# Expose ports for Nginx
|
||||
EXPOSE 80 443
|
||||
|
||||
# Start Supervisor
|
||||
CMD ["/usr/bin/supervisord", "-c", "/app/supervisord.conf"]
|
||||
@@ -1,7 +0,0 @@
#!/bin/bash

export dollar="$"
export http_upgrade="http_upgrade"
export scheme="scheme"
envsubst < /etc/nginx/nginx.conf.template > /etc/nginx/nginx.conf
exec nginx -g 'daemon off;'
@@ -1,72 +0,0 @@
|
||||
events {
|
||||
}
|
||||
|
||||
http {
|
||||
sendfile on;
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
root /www/data/;
|
||||
access_log /var/log/nginx/access.log;
|
||||
|
||||
client_max_body_size ${FILE_SIZE_LIMIT};
|
||||
|
||||
add_header X-Content-Type-Options "nosniff" always;
|
||||
add_header Referrer-Policy "no-referrer-when-downgrade" always;
|
||||
add_header Permissions-Policy "interest-cohort=()" always;
|
||||
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always;
|
||||
add_header X-Forwarded-Proto "${dollar}scheme";
|
||||
add_header X-Forwarded-Host "${dollar}host";
|
||||
add_header X-Forwarded-For "${dollar}proxy_add_x_forwarded_for";
|
||||
add_header X-Real-IP "${dollar}remote_addr";
|
||||
|
||||
location / {
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade ${dollar}http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
proxy_set_header Host ${dollar}http_host;
|
||||
proxy_pass http://localhost:3001/;
|
||||
}
|
||||
|
||||
location /spaces/ {
|
||||
rewrite ^/spaces/?$ /spaces/login break;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade ${dollar}http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
proxy_set_header Host ${dollar}http_host;
|
||||
proxy_pass http://localhost:3002/spaces/;
|
||||
}
|
||||
|
||||
location /god-mode/ {
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade ${dollar}http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
proxy_set_header Host ${dollar}http_host;
|
||||
proxy_pass http://localhost:3003/god-mode/;
|
||||
}
|
||||
|
||||
location /api/ {
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade ${dollar}http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
proxy_set_header Host ${dollar}http_host;
|
||||
proxy_pass http://localhost:8000/api/;
|
||||
}
|
||||
|
||||
location /auth/ {
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade ${dollar}http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
proxy_set_header Host ${dollar}http_host;
|
||||
proxy_pass http://localhost:8000/auth/;
|
||||
}
|
||||
|
||||
location /${BUCKET_NAME}/ {
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade ${dollar}http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
proxy_set_header Host ${dollar}http_host;
|
||||
proxy_pass http://localhost:9000/uploads/;
|
||||
}
|
||||
}
|
||||
}
|
||||
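A minimal sketch (not part of the repository) of why the template above writes ${dollar} before nginx runtime variables: the nginx-start script renders the template with envsubst, and ${dollar} expands to a literal $, so nginx variables such as $http_upgrade survive the substitution while deployment values like FILE_SIZE_LIMIT are filled in:

  export dollar='$'
  export FILE_SIZE_LIMIT=5242880
  echo 'client_max_body_size ${FILE_SIZE_LIMIT}; proxy_set_header Upgrade ${dollar}http_upgrade;' | envsubst
  # -> client_max_body_size 5242880; proxy_set_header Upgrade $http_upgrade;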
@@ -1,14 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
if [ "$BUILD_TYPE" == "full" ]; then
|
||||
|
||||
export PGHOST=localhost
|
||||
|
||||
sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/pg_ctl" -D /var/lib/postgresql/data start
|
||||
sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/psql" --command "CREATE USER $POSTGRES_USER WITH SUPERUSER PASSWORD '$POSTGRES_PASSWORD';" && \
|
||||
sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/createdb" -O "$POSTGRES_USER" "$POSTGRES_DB" && \
|
||||
sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/psql" --command "GRANT ALL PRIVILEGES ON DATABASE $POSTGRES_DB TO $POSTGRES_USER;" && \
|
||||
sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/pg_ctl" -D /var/lib/postgresql/data stop
|
||||
|
||||
fi
|
||||
|
||||
@@ -1,815 +0,0 @@
|
||||
# -----------------------------
|
||||
# PostgreSQL configuration file
|
||||
# -----------------------------
|
||||
#
|
||||
# This file consists of lines of the form:
|
||||
#
|
||||
# name = value
|
||||
#
|
||||
# (The "=" is optional.) Whitespace may be used. Comments are introduced with
|
||||
# "#" anywhere on a line. The complete list of parameter names and allowed
|
||||
# values can be found in the PostgreSQL documentation.
|
||||
#
|
||||
# The commented-out settings shown in this file represent the default values.
|
||||
# Re-commenting a setting is NOT sufficient to revert it to the default value;
|
||||
# you need to reload the server.
|
||||
#
|
||||
# This file is read on server startup and when the server receives a SIGHUP
|
||||
# signal. If you edit the file on a running system, you have to SIGHUP the
|
||||
# server for the changes to take effect, run "pg_ctl reload", or execute
|
||||
# "SELECT pg_reload_conf()". Some parameters, which are marked below,
|
||||
# require a server shutdown and restart to take effect.
|
||||
#
|
||||
# Any parameter can also be given as a command-line option to the server, e.g.,
|
||||
# "postgres -c log_connections=on". Some parameters can be changed at run time
|
||||
# with the "SET" SQL command.
|
||||
#
|
||||
# Memory units: B = bytes Time units: us = microseconds
|
||||
# kB = kilobytes ms = milliseconds
|
||||
# MB = megabytes s = seconds
|
||||
# GB = gigabytes min = minutes
|
||||
# TB = terabytes h = hours
|
||||
# d = days
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# FILE LOCATIONS
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
# The default values of these variables are driven from the -D command-line
|
||||
# option or PGDATA environment variable, represented here as ConfigDir.
|
||||
|
||||
data_directory = '/var/lib/postgresql/data' # use data in another directory
|
||||
# (change requires restart)
|
||||
hba_file = '/etc/postgresql/15/main/pg_hba.conf' # host-based authentication file
|
||||
# (change requires restart)
|
||||
ident_file = '/etc/postgresql/15/main/pg_ident.conf' # ident configuration file
|
||||
# (change requires restart)
|
||||
|
||||
# If external_pid_file is not explicitly set, no extra PID file is written.
|
||||
external_pid_file = '/var/run/postgresql/15-main.pid' # write an extra PID file
|
||||
# (change requires restart)
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# CONNECTIONS AND AUTHENTICATION
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
# - Connection Settings -
|
||||
|
||||
listen_addresses = 'localhost' # what IP address(es) to listen on;
|
||||
# comma-separated list of addresses;
|
||||
# defaults to 'localhost'; use '*' for all
|
||||
# (change requires restart)
|
||||
port = 5432 # (change requires restart)
|
||||
max_connections = 200 # (change requires restart)
|
||||
#superuser_reserved_connections = 3 # (change requires restart)
|
||||
unix_socket_directories = '/var/run/postgresql' # comma-separated list of directories
|
||||
# (change requires restart)
|
||||
#unix_socket_group = '' # (change requires restart)
|
||||
#unix_socket_permissions = 0777 # begin with 0 to use octal notation
|
||||
# (change requires restart)
|
||||
#bonjour = off # advertise server via Bonjour
|
||||
# (change requires restart)
|
||||
#bonjour_name = '' # defaults to the computer name
|
||||
# (change requires restart)
|
||||
|
||||
# - TCP settings -
|
||||
# see "man tcp" for details
|
||||
|
||||
#tcp_keepalives_idle = 0 # TCP_KEEPIDLE, in seconds;
|
||||
# 0 selects the system default
|
||||
#tcp_keepalives_interval = 0 # TCP_KEEPINTVL, in seconds;
|
||||
# 0 selects the system default
|
||||
#tcp_keepalives_count = 0 # TCP_KEEPCNT;
|
||||
# 0 selects the system default
|
||||
#tcp_user_timeout = 0 # TCP_USER_TIMEOUT, in milliseconds;
|
||||
# 0 selects the system default
|
||||
|
||||
#client_connection_check_interval = 0 # time between checks for client
|
||||
# disconnection while running queries;
|
||||
# 0 for never
|
||||
|
||||
# - Authentication -
|
||||
|
||||
#authentication_timeout = 1min # 1s-600s
|
||||
#password_encryption = scram-sha-256 # scram-sha-256 or md5
|
||||
#db_user_namespace = off
|
||||
|
||||
# GSSAPI using Kerberos
|
||||
#krb_server_keyfile = 'FILE:${sysconfdir}/krb5.keytab'
|
||||
#krb_caseins_users = off
|
||||
|
||||
# - SSL -
|
||||
|
||||
ssl = on
|
||||
#ssl_ca_file = ''
|
||||
ssl_cert_file = '/etc/ssl/certs/ssl-cert-snakeoil.pem'
|
||||
#ssl_crl_file = ''
|
||||
#ssl_crl_dir = ''
|
||||
ssl_key_file = '/etc/ssl/private/ssl-cert-snakeoil.key'
|
||||
#ssl_ciphers = 'HIGH:MEDIUM:+3DES:!aNULL' # allowed SSL ciphers
|
||||
#ssl_prefer_server_ciphers = on
|
||||
#ssl_ecdh_curve = 'prime256v1'
|
||||
#ssl_min_protocol_version = 'TLSv1.2'
|
||||
#ssl_max_protocol_version = ''
|
||||
#ssl_dh_params_file = ''
|
||||
#ssl_passphrase_command = ''
|
||||
#ssl_passphrase_command_supports_reload = off
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# RESOURCE USAGE (except WAL)
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
# - Memory -
|
||||
|
||||
shared_buffers = 256MB # min 128kB
|
||||
# (change requires restart)
|
||||
#huge_pages = try # on, off, or try
|
||||
# (change requires restart)
|
||||
#huge_page_size = 0 # zero for system default
|
||||
# (change requires restart)
|
||||
#temp_buffers = 8MB # min 800kB
|
||||
#max_prepared_transactions = 0 # zero disables the feature
|
||||
# (change requires restart)
|
||||
# Caution: it is not advisable to set max_prepared_transactions nonzero unless
|
||||
# you actively intend to use prepared transactions.
|
||||
#work_mem = 4MB # min 64kB
|
||||
#hash_mem_multiplier = 2.0 # 1-1000.0 multiplier on hash table work_mem
|
||||
#maintenance_work_mem = 64MB # min 1MB
|
||||
#autovacuum_work_mem = -1 # min 1MB, or -1 to use maintenance_work_mem
|
||||
#logical_decoding_work_mem = 64MB # min 64kB
|
||||
#max_stack_depth = 2MB # min 100kB
|
||||
#shared_memory_type = mmap # the default is the first option
|
||||
# supported by the operating system:
|
||||
# mmap
|
||||
# sysv
|
||||
# windows
|
||||
# (change requires restart)
|
||||
dynamic_shared_memory_type = posix # the default is usually the first option
|
||||
# supported by the operating system:
|
||||
# posix
|
||||
# sysv
|
||||
# windows
|
||||
# mmap
|
||||
# (change requires restart)
|
||||
#min_dynamic_shared_memory = 0MB # (change requires restart)
|
||||
|
||||
# - Disk -
|
||||
|
||||
#temp_file_limit = -1 # limits per-process temp file space
|
||||
# in kilobytes, or -1 for no limit
|
||||
|
||||
# - Kernel Resources -
|
||||
|
||||
#max_files_per_process = 1000 # min 64
|
||||
# (change requires restart)
|
||||
|
||||
# - Cost-Based Vacuum Delay -
|
||||
|
||||
#vacuum_cost_delay = 0 # 0-100 milliseconds (0 disables)
|
||||
#vacuum_cost_page_hit = 1 # 0-10000 credits
|
||||
#vacuum_cost_page_miss = 2 # 0-10000 credits
|
||||
#vacuum_cost_page_dirty = 20 # 0-10000 credits
|
||||
#vacuum_cost_limit = 200 # 1-10000 credits
|
||||
|
||||
# - Background Writer -
|
||||
|
||||
#bgwriter_delay = 200ms # 10-10000ms between rounds
|
||||
#bgwriter_lru_maxpages = 100 # max buffers written/round, 0 disables
|
||||
#bgwriter_lru_multiplier = 2.0 # 0-10.0 multiplier on buffers scanned/round
|
||||
#bgwriter_flush_after = 512kB # measured in pages, 0 disables
|
||||
|
||||
# - Asynchronous Behavior -
|
||||
|
||||
#backend_flush_after = 0 # measured in pages, 0 disables
|
||||
#effective_io_concurrency = 1 # 1-1000; 0 disables prefetching
|
||||
#maintenance_io_concurrency = 10 # 1-1000; 0 disables prefetching
|
||||
#max_worker_processes = 8 # (change requires restart)
|
||||
#max_parallel_workers_per_gather = 2 # limited by max_parallel_workers
|
||||
#max_parallel_maintenance_workers = 2 # limited by max_parallel_workers
|
||||
#max_parallel_workers = 8 # number of max_worker_processes that
|
||||
# can be used in parallel operations
|
||||
#parallel_leader_participation = on
|
||||
#old_snapshot_threshold = -1 # 1min-60d; -1 disables; 0 is immediate
|
||||
# (change requires restart)
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# WRITE-AHEAD LOG
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
# - Settings -
|
||||
|
||||
#wal_level = replica # minimal, replica, or logical
|
||||
# (change requires restart)
|
||||
#fsync = on # flush data to disk for crash safety
|
||||
# (turning this off can cause
|
||||
# unrecoverable data corruption)
|
||||
#synchronous_commit = on # synchronization level;
|
||||
# off, local, remote_write, remote_apply, or on
|
||||
#wal_sync_method = fsync # the default is the first option
|
||||
# supported by the operating system:
|
||||
# open_datasync
|
||||
# fdatasync (default on Linux and FreeBSD)
|
||||
# fsync
|
||||
# fsync_writethrough
|
||||
# open_sync
|
||||
#full_page_writes = on # recover from partial page writes
|
||||
#wal_log_hints = off # also do full page writes of non-critical updates
|
||||
# (change requires restart)
|
||||
#wal_compression = off # enables compression of full-page writes;
|
||||
# off, pglz, lz4, zstd, or on
|
||||
#wal_init_zero = on # zero-fill new WAL files
|
||||
#wal_recycle = on # recycle WAL files
|
||||
#wal_buffers = -1 # min 32kB, -1 sets based on shared_buffers
|
||||
# (change requires restart)
|
||||
#wal_writer_delay = 200ms # 1-10000 milliseconds
|
||||
#wal_writer_flush_after = 1MB # measured in pages, 0 disables
|
||||
#wal_skip_threshold = 2MB
|
||||
|
||||
#commit_delay = 0 # range 0-100000, in microseconds
|
||||
#commit_siblings = 5 # range 1-1000
|
||||
|
||||
# - Checkpoints -
|
||||
|
||||
#checkpoint_timeout = 5min # range 30s-1d
|
||||
#checkpoint_completion_target = 0.9 # checkpoint target duration, 0.0 - 1.0
|
||||
#checkpoint_flush_after = 256kB # measured in pages, 0 disables
|
||||
#checkpoint_warning = 30s # 0 disables
|
||||
max_wal_size = 1GB
|
||||
min_wal_size = 80MB
|
||||
|
||||
# - Prefetching during recovery -
|
||||
|
||||
#recovery_prefetch = try # prefetch pages referenced in the WAL?
|
||||
#wal_decode_buffer_size = 512kB # lookahead window used for prefetching
|
||||
# (change requires restart)
|
||||
|
||||
# - Archiving -
|
||||
|
||||
#archive_mode = off # enables archiving; off, on, or always
|
||||
# (change requires restart)
|
||||
#archive_library = '' # library to use to archive a logfile segment
|
||||
# (empty string indicates archive_command should
|
||||
# be used)
|
||||
#archive_command = '' # command to use to archive a logfile segment
|
||||
# placeholders: %p = path of file to archive
|
||||
# %f = file name only
|
||||
# e.g. 'test ! -f /mnt/server/archivedir/%f && cp %p /mnt/server/archivedir/%f'
|
||||
#archive_timeout = 0 # force a logfile segment switch after this
|
||||
# number of seconds; 0 disables
|
||||
|
||||
# - Archive Recovery -
|
||||
|
||||
# These are only used in recovery mode.
|
||||
|
||||
#restore_command = '' # command to use to restore an archived logfile segment
|
||||
# placeholders: %p = path of file to restore
|
||||
# %f = file name only
|
||||
# e.g. 'cp /mnt/server/archivedir/%f %p'
|
||||
#archive_cleanup_command = '' # command to execute at every restartpoint
|
||||
#recovery_end_command = '' # command to execute at completion of recovery
|
||||
|
||||
# - Recovery Target -
|
||||
|
||||
# Set these only when performing a targeted recovery.
|
||||
|
||||
#recovery_target = '' # 'immediate' to end recovery as soon as a
|
||||
# consistent state is reached
|
||||
# (change requires restart)
|
||||
#recovery_target_name = '' # the named restore point to which recovery will proceed
|
||||
# (change requires restart)
|
||||
#recovery_target_time = '' # the time stamp up to which recovery will proceed
|
||||
# (change requires restart)
|
||||
#recovery_target_xid = '' # the transaction ID up to which recovery will proceed
|
||||
# (change requires restart)
|
||||
#recovery_target_lsn = '' # the WAL LSN up to which recovery will proceed
|
||||
# (change requires restart)
|
||||
#recovery_target_inclusive = on # Specifies whether to stop:
|
||||
# just after the specified recovery target (on)
|
||||
# just before the recovery target (off)
|
||||
# (change requires restart)
|
||||
#recovery_target_timeline = 'latest' # 'current', 'latest', or timeline ID
|
||||
# (change requires restart)
|
||||
#recovery_target_action = 'pause' # 'pause', 'promote', 'shutdown'
|
||||
# (change requires restart)
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# REPLICATION
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
# - Sending Servers -
|
||||
|
||||
# Set these on the primary and on any standby that will send replication data.
|
||||
|
||||
#max_wal_senders = 10 # max number of walsender processes
|
||||
# (change requires restart)
|
||||
#max_replication_slots = 10 # max number of replication slots
|
||||
# (change requires restart)
|
||||
#wal_keep_size = 0 # in megabytes; 0 disables
|
||||
#max_slot_wal_keep_size = -1 # in megabytes; -1 disables
|
||||
#wal_sender_timeout = 60s # in milliseconds; 0 disables
|
||||
#track_commit_timestamp = off # collect timestamp of transaction commit
|
||||
# (change requires restart)
|
||||
|
||||
# - Primary Server -
|
||||
|
||||
# These settings are ignored on a standby server.
|
||||
|
||||
#synchronous_standby_names = '' # standby servers that provide sync rep
|
||||
# method to choose sync standbys, number of sync standbys,
|
||||
# and comma-separated list of application_name
|
||||
# from standby(s); '*' = all
|
||||
#vacuum_defer_cleanup_age = 0 # number of xacts by which cleanup is delayed
|
||||
|
||||
# - Standby Servers -
|
||||
|
||||
# These settings are ignored on a primary server.
|
||||
|
||||
#primary_conninfo = '' # connection string to sending server
|
||||
#primary_slot_name = '' # replication slot on sending server
|
||||
#promote_trigger_file = '' # file name whose presence ends recovery
|
||||
#hot_standby = on # "off" disallows queries during recovery
|
||||
# (change requires restart)
|
||||
#max_standby_archive_delay = 30s # max delay before canceling queries
|
||||
# when reading WAL from archive;
|
||||
# -1 allows indefinite delay
|
||||
#max_standby_streaming_delay = 30s # max delay before canceling queries
|
||||
# when reading streaming WAL;
|
||||
# -1 allows indefinite delay
|
||||
#wal_receiver_create_temp_slot = off # create temp slot if primary_slot_name
|
||||
# is not set
|
||||
#wal_receiver_status_interval = 10s # send replies at least this often
|
||||
# 0 disables
|
||||
#hot_standby_feedback = off # send info from standby to prevent
|
||||
# query conflicts
|
||||
#wal_receiver_timeout = 60s # time that receiver waits for
|
||||
# communication from primary
|
||||
# in milliseconds; 0 disables
|
||||
#wal_retrieve_retry_interval = 5s # time to wait before retrying to
|
||||
# retrieve WAL after a failed attempt
|
||||
#recovery_min_apply_delay = 0 # minimum delay for applying changes during recovery
|
||||
|
||||
# - Subscribers -
|
||||
|
||||
# These settings are ignored on a publisher.
|
||||
|
||||
#max_logical_replication_workers = 4 # taken from max_worker_processes
|
||||
# (change requires restart)
|
||||
#max_sync_workers_per_subscription = 2 # taken from max_logical_replication_workers
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# QUERY TUNING
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
# - Planner Method Configuration -
|
||||
|
||||
#enable_async_append = on
|
||||
#enable_bitmapscan = on
|
||||
#enable_gathermerge = on
|
||||
#enable_hashagg = on
|
||||
#enable_hashjoin = on
|
||||
#enable_incremental_sort = on
|
||||
#enable_indexscan = on
|
||||
#enable_indexonlyscan = on
|
||||
#enable_material = on
|
||||
#enable_memoize = on
|
||||
#enable_mergejoin = on
|
||||
#enable_nestloop = on
|
||||
#enable_parallel_append = on
|
||||
#enable_parallel_hash = on
|
||||
#enable_partition_pruning = on
|
||||
#enable_partitionwise_join = off
|
||||
#enable_partitionwise_aggregate = off
|
||||
#enable_seqscan = on
|
||||
#enable_sort = on
|
||||
#enable_tidscan = on
|
||||
|
||||
# - Planner Cost Constants -
|
||||
|
||||
#seq_page_cost = 1.0 # measured on an arbitrary scale
|
||||
#random_page_cost = 4.0 # same scale as above
|
||||
#cpu_tuple_cost = 0.01 # same scale as above
|
||||
#cpu_index_tuple_cost = 0.005 # same scale as above
|
||||
#cpu_operator_cost = 0.0025 # same scale as above
|
||||
#parallel_setup_cost = 1000.0 # same scale as above
|
||||
#parallel_tuple_cost = 0.1 # same scale as above
|
||||
#min_parallel_table_scan_size = 8MB
|
||||
#min_parallel_index_scan_size = 512kB
|
||||
#effective_cache_size = 4GB
|
||||
|
||||
#jit_above_cost = 100000 # perform JIT compilation if available
|
||||
# and query more expensive than this;
|
||||
# -1 disables
|
||||
#jit_inline_above_cost = 500000 # inline small functions if query is
|
||||
# more expensive than this; -1 disables
|
||||
#jit_optimize_above_cost = 500000 # use expensive JIT optimizations if
|
||||
# query is more expensive than this;
|
||||
# -1 disables
|
||||
|
||||
# - Genetic Query Optimizer -
|
||||
|
||||
#geqo = on
|
||||
#geqo_threshold = 12
|
||||
#geqo_effort = 5 # range 1-10
|
||||
#geqo_pool_size = 0 # selects default based on effort
|
||||
#geqo_generations = 0 # selects default based on effort
|
||||
#geqo_selection_bias = 2.0 # range 1.5-2.0
|
||||
#geqo_seed = 0.0 # range 0.0-1.0
|
||||
|
||||
# - Other Planner Options -
|
||||
|
||||
#default_statistics_target = 100 # range 1-10000
|
||||
#constraint_exclusion = partition # on, off, or partition
|
||||
#cursor_tuple_fraction = 0.1 # range 0.0-1.0
|
||||
#from_collapse_limit = 8
|
||||
#jit = on # allow JIT compilation
|
||||
#join_collapse_limit = 8 # 1 disables collapsing of explicit
|
||||
# JOIN clauses
|
||||
#plan_cache_mode = auto # auto, force_generic_plan or
|
||||
# force_custom_plan
|
||||
#recursive_worktable_factor = 10.0 # range 0.001-1000000
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# REPORTING AND LOGGING
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
# - Where to Log -
|
||||
|
||||
#log_destination = 'stderr' # Valid values are combinations of
|
||||
# stderr, csvlog, jsonlog, syslog, and
|
||||
# eventlog, depending on platform.
|
||||
# csvlog and jsonlog require
|
||||
# logging_collector to be on.
|
||||
|
||||
# This is used when logging to stderr:
|
||||
#logging_collector = off # Enable capturing of stderr, jsonlog,
|
||||
# and csvlog into log files. Required
|
||||
# to be on for csvlogs and jsonlogs.
|
||||
# (change requires restart)
|
||||
|
||||
# These are only used if logging_collector is on:
|
||||
#log_directory = 'log' # directory where log files are written,
|
||||
# can be absolute or relative to PGDATA
|
||||
#log_filename = 'postgresql-%Y-%m-%d_%H%M%S.log' # log file name pattern,
|
||||
# can include strftime() escapes
|
||||
#log_file_mode = 0600 # creation mode for log files,
|
||||
# begin with 0 to use octal notation
|
||||
#log_rotation_age = 1d # Automatic rotation of logfiles will
|
||||
# happen after that time. 0 disables.
|
||||
#log_rotation_size = 10MB # Automatic rotation of logfiles will
|
||||
# happen after that much log output.
|
||||
# 0 disables.
|
||||
#log_truncate_on_rotation = off # If on, an existing log file with the
|
||||
# same name as the new log file will be
|
||||
# truncated rather than appended to.
|
||||
# But such truncation only occurs on
|
||||
# time-driven rotation, not on restarts
|
||||
# or size-driven rotation. Default is
|
||||
# off, meaning append to existing files
|
||||
# in all cases.
|
||||
|
||||
# These are relevant when logging to syslog:
|
||||
#syslog_facility = 'LOCAL0'
|
||||
#syslog_ident = 'postgres'
|
||||
#syslog_sequence_numbers = on
|
||||
#syslog_split_messages = on
|
||||
|
||||
# This is only relevant when logging to eventlog (Windows):
|
||||
# (change requires restart)
|
||||
#event_source = 'PostgreSQL'
|
||||
|
||||
# - When to Log -
|
||||
|
||||
#log_min_messages = warning # values in order of decreasing detail:
|
||||
# debug5
|
||||
# debug4
|
||||
# debug3
|
||||
# debug2
|
||||
# debug1
|
||||
# info
|
||||
# notice
|
||||
# warning
|
||||
# error
|
||||
# log
|
||||
# fatal
|
||||
# panic
|
||||
|
||||
#log_min_error_statement = error # values in order of decreasing detail:
|
||||
# debug5
|
||||
# debug4
|
||||
# debug3
|
||||
# debug2
|
||||
# debug1
|
||||
# info
|
||||
# notice
|
||||
# warning
|
||||
# error
|
||||
# log
|
||||
# fatal
|
||||
# panic (effectively off)
|
||||
|
||||
#log_min_duration_statement = -1 # -1 is disabled, 0 logs all statements
|
||||
# and their durations, > 0 logs only
|
||||
# statements running at least this number
|
||||
# of milliseconds
|
||||
|
||||
#log_min_duration_sample = -1 # -1 is disabled, 0 logs a sample of statements
|
||||
# and their durations, > 0 logs only a sample of
|
||||
# statements running at least this number
|
||||
# of milliseconds;
|
||||
# sample fraction is determined by log_statement_sample_rate
|
||||
|
||||
#log_statement_sample_rate = 1.0 # fraction of logged statements exceeding
|
||||
# log_min_duration_sample to be logged;
|
||||
# 1.0 logs all such statements, 0.0 never logs
|
||||
|
||||
|
||||
#log_transaction_sample_rate = 0.0 # fraction of transactions whose statements
|
||||
# are logged regardless of their duration; 1.0 logs all
|
||||
# statements from all transactions, 0.0 never logs
|
||||
|
||||
#log_startup_progress_interval = 10s # Time between progress updates for
|
||||
# long-running startup operations.
|
||||
# 0 disables the feature, > 0 indicates
|
||||
# the interval in milliseconds.
|
||||
|
||||
# - What to Log -
|
||||
|
||||
#debug_print_parse = off
|
||||
#debug_print_rewritten = off
|
||||
#debug_print_plan = off
|
||||
#debug_pretty_print = on
|
||||
#log_autovacuum_min_duration = 10min # log autovacuum activity;
|
||||
# -1 disables, 0 logs all actions and
|
||||
# their durations, > 0 logs only
|
||||
# actions running at least this number
|
||||
# of milliseconds.
|
||||
#log_checkpoints = on
|
||||
#log_connections = off
|
||||
#log_disconnections = off
|
||||
#log_duration = off
|
||||
#log_error_verbosity = default # terse, default, or verbose messages
|
||||
#log_hostname = off
|
||||
log_line_prefix = '%m [%p] %q%u@%d ' # special values:
|
||||
# %a = application name
|
||||
# %u = user name
|
||||
# %d = database name
|
||||
# %r = remote host and port
|
||||
# %h = remote host
|
||||
# %b = backend type
|
||||
# %p = process ID
|
||||
# %P = process ID of parallel group leader
|
||||
# %t = timestamp without milliseconds
|
||||
# %m = timestamp with milliseconds
|
||||
# %n = timestamp with milliseconds (as a Unix epoch)
|
||||
# %Q = query ID (0 if none or not computed)
|
||||
# %i = command tag
|
||||
# %e = SQL state
|
||||
# %c = session ID
|
||||
# %l = session line number
|
||||
# %s = session start timestamp
|
||||
# %v = virtual transaction ID
|
||||
# %x = transaction ID (0 if none)
|
||||
# %q = stop here in non-session
|
||||
# processes
|
||||
# %% = '%'
|
||||
# e.g. '<%u%%%d> '
|
||||
#log_lock_waits = off # log lock waits >= deadlock_timeout
|
||||
#log_recovery_conflict_waits = off # log standby recovery conflict waits
|
||||
# >= deadlock_timeout
|
||||
#log_parameter_max_length = -1 # when logging statements, limit logged
|
||||
# bind-parameter values to N bytes;
|
||||
# -1 means print in full, 0 disables
|
||||
#log_parameter_max_length_on_error = 0 # when logging an error, limit logged
|
||||
# bind-parameter values to N bytes;
|
||||
# -1 means print in full, 0 disables
|
||||
#log_statement = 'none' # none, ddl, mod, all
|
||||
#log_replication_commands = off
|
||||
#log_temp_files = -1 # log temporary files equal or larger
|
||||
# than the specified size in kilobytes;
|
||||
# -1 disables, 0 logs all temp files
|
||||
log_timezone = 'Etc/UTC'
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# PROCESS TITLE
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
cluster_name = '15/main' # added to process titles if nonempty
|
||||
# (change requires restart)
|
||||
#update_process_title = on
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# STATISTICS
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
# - Cumulative Query and Index Statistics -
|
||||
|
||||
#track_activities = on
|
||||
#track_activity_query_size = 1024 # (change requires restart)
|
||||
#track_counts = on
|
||||
#track_io_timing = off
|
||||
#track_wal_io_timing = off
|
||||
#track_functions = none # none, pl, all
|
||||
#stats_fetch_consistency = cache
|
||||
|
||||
|
||||
# - Monitoring -
|
||||
|
||||
#compute_query_id = auto
|
||||
#log_statement_stats = off
|
||||
#log_parser_stats = off
|
||||
#log_planner_stats = off
|
||||
#log_executor_stats = off
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# AUTOVACUUM
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
#autovacuum = on # Enable autovacuum subprocess? 'on'
|
||||
# requires track_counts to also be on.
|
||||
#autovacuum_max_workers = 3 # max number of autovacuum subprocesses
|
||||
# (change requires restart)
|
||||
#autovacuum_naptime = 1min # time between autovacuum runs
|
||||
#autovacuum_vacuum_threshold = 50 # min number of row updates before
|
||||
# vacuum
|
||||
#autovacuum_vacuum_insert_threshold = 1000 # min number of row inserts
|
||||
# before vacuum; -1 disables insert
|
||||
# vacuums
|
||||
#autovacuum_analyze_threshold = 50 # min number of row updates before
|
||||
# analyze
|
||||
#autovacuum_vacuum_scale_factor = 0.2 # fraction of table size before vacuum
|
||||
#autovacuum_vacuum_insert_scale_factor = 0.2 # fraction of inserts over table
|
||||
# size before insert vacuum
|
||||
#autovacuum_analyze_scale_factor = 0.1 # fraction of table size before analyze
|
||||
#autovacuum_freeze_max_age = 200000000 # maximum XID age before forced vacuum
|
||||
# (change requires restart)
|
||||
#autovacuum_multixact_freeze_max_age = 400000000 # maximum multixact age
|
||||
# before forced vacuum
|
||||
# (change requires restart)
|
||||
#autovacuum_vacuum_cost_delay = 2ms # default vacuum cost delay for
|
||||
# autovacuum, in milliseconds;
|
||||
# -1 means use vacuum_cost_delay
|
||||
#autovacuum_vacuum_cost_limit = -1 # default vacuum cost limit for
|
||||
# autovacuum, -1 means use
|
||||
# vacuum_cost_limit
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# CLIENT CONNECTION DEFAULTS
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
# - Statement Behavior -
|
||||
|
||||
#client_min_messages = notice # values in order of decreasing detail:
|
||||
# debug5
|
||||
# debug4
|
||||
# debug3
|
||||
# debug2
|
||||
# debug1
|
||||
# log
|
||||
# notice
|
||||
# warning
|
||||
# error
|
||||
#search_path = '"$user", public' # schema names
|
||||
#row_security = on
|
||||
#default_table_access_method = 'heap'
|
||||
#default_tablespace = '' # a tablespace name, '' uses the default
|
||||
#default_toast_compression = 'pglz' # 'pglz' or 'lz4'
|
||||
#temp_tablespaces = '' # a list of tablespace names, '' uses
|
||||
# only default tablespace
|
||||
#check_function_bodies = on
|
||||
#default_transaction_isolation = 'read committed'
|
||||
#default_transaction_read_only = off
|
||||
#default_transaction_deferrable = off
|
||||
#session_replication_role = 'origin'
|
||||
#statement_timeout = 0 # in milliseconds, 0 is disabled
|
||||
#lock_timeout = 0 # in milliseconds, 0 is disabled
|
||||
#idle_in_transaction_session_timeout = 0 # in milliseconds, 0 is disabled
|
||||
#idle_session_timeout = 0 # in milliseconds, 0 is disabled
|
||||
#vacuum_freeze_table_age = 150000000
|
||||
#vacuum_freeze_min_age = 50000000
|
||||
#vacuum_failsafe_age = 1600000000
|
||||
#vacuum_multixact_freeze_table_age = 150000000
|
||||
#vacuum_multixact_freeze_min_age = 5000000
|
||||
#vacuum_multixact_failsafe_age = 1600000000
|
||||
#bytea_output = 'hex' # hex, escape
|
||||
#xmlbinary = 'base64'
|
||||
#xmloption = 'content'
|
||||
#gin_pending_list_limit = 4MB
|
||||
|
||||
# - Locale and Formatting -
|
||||
|
||||
datestyle = 'iso, mdy'
|
||||
#intervalstyle = 'postgres'
|
||||
timezone = 'Etc/UTC'
|
||||
#timezone_abbreviations = 'Default' # Select the set of available time zone
|
||||
# abbreviations. Currently, there are
|
||||
# Default
|
||||
# Australia (historical usage)
|
||||
# India
|
||||
# You can create your own file in
|
||||
# share/timezonesets/.
|
||||
#extra_float_digits = 1 # min -15, max 3; any value >0 actually
|
||||
# selects precise output mode
|
||||
#client_encoding = sql_ascii # actually, defaults to database
|
||||
# encoding
|
||||
|
||||
# These settings are initialized by initdb, but they can be changed.
|
||||
lc_messages = 'C.UTF-8' # locale for system error message
|
||||
# strings
|
||||
lc_monetary = 'C.UTF-8' # locale for monetary formatting
|
||||
lc_numeric = 'C.UTF-8' # locale for number formatting
|
||||
lc_time = 'C.UTF-8' # locale for time formatting
|
||||
|
||||
# default configuration for text search
|
||||
default_text_search_config = 'pg_catalog.english'
|
||||
|
||||
# - Shared Library Preloading -
|
||||
|
||||
#local_preload_libraries = ''
|
||||
#session_preload_libraries = ''
|
||||
#shared_preload_libraries = '' # (change requires restart)
|
||||
#jit_provider = 'llvmjit' # JIT library to use
|
||||
|
||||
# - Other Defaults -
|
||||
|
||||
#dynamic_library_path = '$libdir'
|
||||
#extension_destdir = '' # prepend path when loading extensions
|
||||
# and shared objects (added by Debian)
|
||||
#gin_fuzzy_search_limit = 0
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# LOCK MANAGEMENT
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
#deadlock_timeout = 1s
|
||||
#max_locks_per_transaction = 64 # min 10
|
||||
# (change requires restart)
|
||||
#max_pred_locks_per_transaction = 64 # min 10
|
||||
# (change requires restart)
|
||||
#max_pred_locks_per_relation = -2 # negative values mean
|
||||
# (max_pred_locks_per_transaction
|
||||
# / -max_pred_locks_per_relation) - 1
|
||||
#max_pred_locks_per_page = 2 # min 0
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# VERSION AND PLATFORM COMPATIBILITY
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
# - Previous PostgreSQL Versions -
|
||||
|
||||
#array_nulls = on
|
||||
#backslash_quote = safe_encoding # on, off, or safe_encoding
|
||||
#escape_string_warning = on
|
||||
#lo_compat_privileges = off
|
||||
#quote_all_identifiers = off
|
||||
#standard_conforming_strings = on
|
||||
#synchronize_seqscans = on
|
||||
|
||||
# - Other Platforms and Clients -
|
||||
|
||||
#transform_null_equals = off
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# ERROR HANDLING
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
#exit_on_error = off # terminate session on any error?
|
||||
#restart_after_crash = on # reinitialize after backend crash?
|
||||
#data_sync_retry = off # retry or panic on failure to fsync
|
||||
# data?
|
||||
# (change requires restart)
|
||||
#recovery_init_sync_method = fsync # fsync, syncfs (Linux 5.8+)
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# CONFIG FILE INCLUDES
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
# These options allow settings to be loaded from files other than the
|
||||
# default postgresql.conf. Note that these are directives, not variable
|
||||
# assignments, so they can usefully be given more than once.
|
||||
|
||||
# include_dir = 'conf.d' # include files ending in '.conf' from
|
||||
# a directory, e.g., 'conf.d'
|
||||
#include_if_exists = '...' # include file only if it exists
|
||||
#include = '...' # include file
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# CUSTOMIZED OPTIONS
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
# Add settings for extensions here
|
||||
@@ -1,71 +0,0 @@
|
||||
|
||||
[program:web]
|
||||
command=node /app/web/web/server.js
|
||||
autostart=true
|
||||
autorestart=true
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes=0
|
||||
stderr_logfile=/dev/stdout
|
||||
stderr_logfile_maxbytes=0
|
||||
environment=PORT=3001,HOSTNAME=0.0.0.0
|
||||
|
||||
[program:space]
|
||||
command=node /app/space/space/server.js
|
||||
autostart=true
|
||||
autorestart=true
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes=0
|
||||
stderr_logfile=/dev/stdout
|
||||
stderr_logfile_maxbytes=0
|
||||
environment=PORT=3002,HOSTNAME=0.0.0.0
|
||||
|
||||
[program:admin]
|
||||
command=node /app/admin/admin/server.js
|
||||
autostart=true
|
||||
autorestart=true
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes=0
|
||||
stderr_logfile=/dev/stdout
|
||||
stderr_logfile_maxbytes=0
|
||||
environment=PORT=3003,HOSTNAME=0.0.0.0
|
||||
|
||||
[program:migrator]
|
||||
directory=/app/api
|
||||
command=sh -c "./bin/docker-entrypoint-migrator.sh"
|
||||
autostart=true
|
||||
autorestart=false
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes=0
|
||||
stderr_logfile=/dev/stdout
|
||||
stderr_logfile_maxbytes=0
|
||||
|
||||
[program:api]
|
||||
directory=/app/api
|
||||
command=sh -c "./bin/docker-entrypoint-api.sh"
|
||||
autostart=true
|
||||
autorestart=true
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes=0
|
||||
stderr_logfile=/dev/stdout
|
||||
stderr_logfile_maxbytes=0
|
||||
|
||||
[program:worker]
|
||||
directory=/app/api
|
||||
command=sh -c "./bin/docker-entrypoint-worker.sh"
|
||||
autostart=true
|
||||
autorestart=true
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes=0
|
||||
stderr_logfile=/dev/stdout
|
||||
stderr_logfile_maxbytes=0
|
||||
|
||||
[program:beat]
|
||||
directory=/app/api
|
||||
command=sh -c "./bin/docker-entrypoint-beat.sh"
|
||||
autostart=true
|
||||
autorestart=true
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes=0
|
||||
stderr_logfile=/dev/stdout
|
||||
stderr_logfile_maxbytes=0
|
||||
|
||||
@@ -1,38 +0,0 @@
|
||||
[supervisord]
|
||||
user=root
|
||||
nodaemon=true
|
||||
stderr_logfile=/app/logs/error/supervisor.err.log
|
||||
stdout_logfile=/app/logs/access/supervisor.log
|
||||
|
||||
[program:redis]
|
||||
directory=/app/data/redis
|
||||
command=redis-server
|
||||
autostart=true
|
||||
autorestart=true
|
||||
stderr_logfile=/app/logs/error/redis.err.log
|
||||
stdout_logfile=/app/logs/access/redis.log
|
||||
|
||||
[program:postgresql]
|
||||
user=postgres
|
||||
command=/usr/lib/postgresql/15/bin/postgres --config-file=/etc/postgresql/postgresql.conf
|
||||
autostart=true
|
||||
autorestart=true
|
||||
stderr_logfile=/app/logs/error/postgresql.err.log
|
||||
stdout_logfile=/app/logs/access/postgresql.log
|
||||
|
||||
[program:minio]
|
||||
directory=/app/data/minio
|
||||
command=minio server /app/data/minio
|
||||
autostart=true
|
||||
autorestart=true
|
||||
stderr_logfile=/app/logs/error/minio.err.log
|
||||
stdout_logfile=/app/logs/access/minio.log
|
||||
|
||||
[program:nginx]
|
||||
directory=/app/data/nginx
|
||||
command=/app/nginx-start.sh
|
||||
autostart=true
|
||||
autorestart=true
|
||||
stderr_logfile=/app/logs/error/nginx.err.log
|
||||
stdout_logfile=/app/logs/access/nginx.log
|
||||
|
||||
@@ -1,14 +0,0 @@
|
||||
[supervisord]
|
||||
user=root
|
||||
nodaemon=true
|
||||
stderr_logfile=/app/logs/error/supervisor.err.log
|
||||
stdout_logfile=/app/logs/access/supervisor.log
|
||||
|
||||
[program:nginx]
|
||||
directory=/app/data/nginx
|
||||
command=/app/nginx-start.sh
|
||||
autostart=true
|
||||
autorestart=true
|
||||
stderr_logfile=/app/logs/error/nginx.err.log
|
||||
stdout_logfile=/app/logs/access/nginx.log
|
||||
|
||||
@@ -1,73 +0,0 @@
|
||||
# Backend
|
||||
# Debug value for api server use it as 0 for production use
|
||||
DEBUG=0
|
||||
CORS_ALLOWED_ORIGINS="http://localhost:3000,http://localhost:3001,http://localhost:3002,http://localhost:3100"
|
||||
|
||||
# Database Settings
|
||||
POSTGRES_USER="plane"
|
||||
POSTGRES_PASSWORD="plane"
|
||||
POSTGRES_HOST="plane-db"
|
||||
POSTGRES_DB="plane"
|
||||
POSTGRES_PORT=5432
|
||||
DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
|
||||
|
||||
# Redis Settings
|
||||
REDIS_HOST="plane-redis"
|
||||
REDIS_PORT="6379"
|
||||
REDIS_URL="redis://${REDIS_HOST}:6379/"
|
||||
|
||||
# RabbitMQ Settings
|
||||
RABBITMQ_HOST="plane-mq"
|
||||
RABBITMQ_PORT="5672"
|
||||
RABBITMQ_USER="plane"
|
||||
RABBITMQ_PASSWORD="plane"
|
||||
RABBITMQ_VHOST="plane"
|
||||
|
||||
# AWS Settings
|
||||
AWS_REGION=""
|
||||
AWS_ACCESS_KEY_ID="access-key"
|
||||
AWS_SECRET_ACCESS_KEY="secret-key"
|
||||
AWS_S3_ENDPOINT_URL="http://localhost:9000"
|
||||
# Changing this requires change in the nginx.conf for uploads if using minio setup
|
||||
AWS_S3_BUCKET_NAME="uploads"
|
||||
# Maximum file upload limit
|
||||
FILE_SIZE_LIMIT=5242880
|
||||
|
||||
# Settings related to Docker
|
||||
DOCKERIZED=1 # deprecated
|
||||
|
||||
# set to 1 If using the pre-configured minio setup
|
||||
USE_MINIO=0
|
||||
|
||||
# Nginx Configuration
|
||||
NGINX_PORT=80
|
||||
|
||||
# Email redirections and minio domain settings
|
||||
WEB_URL="http://localhost:8000"
|
||||
|
||||
# Gunicorn Workers
|
||||
GUNICORN_WORKERS=2
|
||||
|
||||
# Base URLs
|
||||
ADMIN_BASE_URL="http://localhost:3001"
|
||||
ADMIN_BASE_PATH="/god-mode"
|
||||
|
||||
SPACE_BASE_URL="http://localhost:3002"
|
||||
SPACE_BASE_PATH="/spaces"
|
||||
|
||||
APP_BASE_URL="http://localhost:3000"
|
||||
APP_BASE_PATH=""
|
||||
|
||||
LIVE_BASE_URL="http://localhost:3100"
|
||||
LIVE_BASE_PATH="/live"
|
||||
|
||||
LIVE_SERVER_SECRET_KEY="secret-key"
|
||||
|
||||
# Hard delete files after days
|
||||
HARD_DELETE_AFTER_DAYS=60
|
||||
|
||||
# Force HTTPS for handling SSL Termination
|
||||
MINIO_ENDPOINT_SSL=0
|
||||
|
||||
# API key rate limit
|
||||
API_KEY_RATE_LIMIT="60/minute"
|
||||
@@ -1,52 +0,0 @@
|
||||
FROM python:3.12.5-alpine AS backend
|
||||
|
||||
# set environment variables
|
||||
ENV PYTHONDONTWRITEBYTECODE 1
|
||||
ENV PYTHONUNBUFFERED 1
|
||||
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
|
||||
ENV INSTANCE_CHANGELOG_URL https://sites.plane.so/pages/691ef037bcfe416a902e48cb55f59891/
|
||||
|
||||
WORKDIR /code
|
||||
|
||||
RUN apk add --no-cache \
|
||||
"libpq" \
|
||||
"libxslt" \
|
||||
"nodejs-current" \
|
||||
"xmlsec"
|
||||
|
||||
COPY requirements.txt ./
|
||||
COPY requirements ./requirements
|
||||
RUN apk add --no-cache libffi-dev
|
||||
RUN apk add --no-cache --virtual .build-deps \
|
||||
"bash~=5.2" \
|
||||
"g++" \
|
||||
"gcc" \
|
||||
"cargo" \
|
||||
"git" \
|
||||
"make" \
|
||||
"postgresql-dev" \
|
||||
"libc-dev" \
|
||||
"linux-headers" \
|
||||
&& \
|
||||
pip install -r requirements.txt --compile --no-cache-dir \
|
||||
&& \
|
||||
apk del .build-deps
|
||||
|
||||
|
||||
# Add in Django deps and generate Django's static files
|
||||
COPY manage.py manage.py
|
||||
COPY plane plane/
|
||||
COPY templates templates/
|
||||
COPY package.json package.json
|
||||
|
||||
RUN apk --no-cache add "bash~=5.2"
|
||||
COPY ./bin ./bin/
|
||||
|
||||
RUN mkdir -p /code/plane/logs
|
||||
RUN chmod +x ./bin/*
|
||||
RUN chmod -R 777 /code
|
||||
|
||||
# Expose container port and run entry point script
|
||||
EXPOSE 8000
|
||||
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
web: gunicorn -w 4 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:$PORT --max-requests 10000 --max-requests-jitter 1000 --access-logfile -
|
||||
worker: celery -A plane worker -l info
|
||||
beat: celery -A plane beat -l INFO
|
||||
@@ -1,224 +0,0 @@
|
||||
# All the python scripts that are used for back migrations
|
||||
import uuid
|
||||
import random
|
||||
from django.contrib.auth.hashers import make_password
|
||||
from plane.db.models import ProjectIdentifier
|
||||
from plane.db.models import (
|
||||
Issue,
|
||||
IssueComment,
|
||||
User,
|
||||
Project,
|
||||
ProjectMember,
|
||||
Label,
|
||||
Integration,
|
||||
)
|
||||
|
||||
|
||||
# Update description and description html values for old descriptions
|
||||
def update_description():
|
||||
try:
|
||||
issues = Issue.objects.all()
|
||||
updated_issues = []
|
||||
|
||||
for issue in issues:
|
||||
issue.description_html = f"<p>{issue.description}</p>"
|
||||
issue.description_stripped = issue.description
|
||||
updated_issues.append(issue)
|
||||
|
||||
Issue.objects.bulk_update(
|
||||
updated_issues, ["description_html", "description_stripped"], batch_size=100
|
||||
)
|
||||
print("Success")
|
||||
except Exception as e:
|
||||
print(e)
|
||||
print("Failed")
|
||||
|
||||
|
||||
def update_comments():
|
||||
try:
|
||||
issue_comments = IssueComment.objects.all()
|
||||
updated_issue_comments = []
|
||||
|
||||
for issue_comment in issue_comments:
|
||||
issue_comment.comment_html = f"<p>{issue_comment.comment_stripped}</p>"
|
||||
updated_issue_comments.append(issue_comment)
|
||||
|
||||
IssueComment.objects.bulk_update(
|
||||
updated_issue_comments, ["comment_html"], batch_size=100
|
||||
)
|
||||
print("Success")
|
||||
except Exception as e:
|
||||
print(e)
|
||||
print("Failed")
|
||||
|
||||
|
||||
def update_project_identifiers():
|
||||
try:
|
||||
project_identifiers = ProjectIdentifier.objects.filter(
|
||||
workspace_id=None
|
||||
).select_related("project", "project__workspace")
|
||||
updated_identifiers = []
|
||||
|
||||
for identifier in project_identifiers:
|
||||
identifier.workspace_id = identifier.project.workspace_id
|
||||
updated_identifiers.append(identifier)
|
||||
|
||||
ProjectIdentifier.objects.bulk_update(
|
||||
updated_identifiers, ["workspace_id"], batch_size=50
|
||||
)
|
||||
print("Success")
|
||||
except Exception as e:
|
||||
print(e)
|
||||
print("Failed")
|
||||
|
||||
|
||||
def update_user_empty_password():
|
||||
try:
|
||||
users = User.objects.filter(password="")
|
||||
updated_users = []
|
||||
|
||||
for user in users:
|
||||
user.password = make_password(uuid.uuid4().hex)
|
||||
user.is_password_autoset = True
|
||||
updated_users.append(user)
|
||||
|
||||
User.objects.bulk_update(updated_users, ["password"], batch_size=50)
|
||||
print("Success")
|
||||
|
||||
except Exception as e:
|
||||
print(e)
|
||||
print("Failed")
|
||||
|
||||
|
||||
def updated_issue_sort_order():
|
||||
try:
|
||||
issues = Issue.objects.all()
|
||||
updated_issues = []
|
||||
|
||||
for issue in issues:
|
||||
issue.sort_order = issue.sequence_id * random.randint(100, 500)
|
||||
updated_issues.append(issue)
|
||||
|
||||
Issue.objects.bulk_update(updated_issues, ["sort_order"], batch_size=100)
|
||||
print("Success")
|
||||
except Exception as e:
|
||||
print(e)
|
||||
print("Failed")
|
||||
|
||||
|
||||
def update_project_cover_images():
|
||||
try:
|
||||
project_cover_images = [
|
||||
"https://images.unsplash.com/photo-1677432658720-3d84f9d657b4?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1170&q=80",
|
||||
"https://images.unsplash.com/photo-1661107564401-57497d8fe86f?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1332&q=80",
|
||||
"https://images.unsplash.com/photo-1677352241429-dc90cfc7a623?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1332&q=80",
|
||||
"https://images.unsplash.com/photo-1677196728306-eeafea692454?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1331&q=80",
|
||||
"https://images.unsplash.com/photo-1660902179734-c94c944f7830?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1255&q=80",
|
||||
"https://images.unsplash.com/photo-1672243775941-10d763d9adef?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1170&q=80",
|
||||
"https://images.unsplash.com/photo-1677040628614-53936ff66632?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1170&q=80",
|
||||
"https://images.unsplash.com/photo-1676920410907-8d5f8dd4b5ba?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1332&q=80",
|
||||
"https://images.unsplash.com/photo-1676846328604-ce831c481346?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1155&q=80",
|
||||
"https://images.unsplash.com/photo-1676744843212-09b7e64c3a05?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1170&q=80",
|
||||
"https://images.unsplash.com/photo-1676798531090-1608bedeac7b?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1170&q=80",
|
||||
"https://images.unsplash.com/photo-1597088758740-56fd7ec8a3f0?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1169&q=80",
|
||||
"https://images.unsplash.com/photo-1676638392418-80aad7c87b96?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=774&q=80",
|
||||
"https://images.unsplash.com/photo-1649639194967-2fec0b4ea7bc?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1170&q=80",
|
||||
"https://images.unsplash.com/photo-1675883086902-b453b3f8146e?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=774&q=80",
|
||||
"https://images.unsplash.com/photo-1675887057159-40fca28fdc5d?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1173&q=80",
|
||||
"https://images.unsplash.com/photo-1675373980203-f84c5a672aa5?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1170&q=80",
|
||||
"https://images.unsplash.com/photo-1675191475318-d2bf6bad1200?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1332&q=80",
|
||||
"https://images.unsplash.com/photo-1675456230532-2194d0c4bcc0?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1170&q=80",
|
||||
"https://images.unsplash.com/photo-1675371788315-60fa0ef48267?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1332&q=80",
|
||||
]
|
||||
|
||||
projects = Project.objects.all()
|
||||
updated_projects = []
|
||||
for project in projects:
|
||||
project.cover_image = project_cover_images[random.randint(0, 19)]
|
||||
updated_projects.append(project)
|
||||
|
||||
Project.objects.bulk_update(updated_projects, ["cover_image"], batch_size=100)
|
||||
print("Success")
|
||||
except Exception as e:
|
||||
print(e)
|
||||
print("Failed")
|
||||
|
||||
|
||||
def update_user_view_property():
|
||||
try:
|
||||
project_members = ProjectMember.objects.all()
|
||||
updated_project_members = []
|
||||
for project_member in project_members:
|
||||
project_member.default_props = {
|
||||
"filters": {"type": None},
|
||||
"orderBy": "-created_at",
|
||||
"collapsed": True,
|
||||
"issueView": "list",
|
||||
"filterIssue": None,
|
||||
"groupByProperty": None,
|
||||
"showEmptyGroups": True,
|
||||
}
|
||||
updated_project_members.append(project_member)
|
||||
|
||||
ProjectMember.objects.bulk_update(
|
||||
updated_project_members, ["default_props"], batch_size=100
|
||||
)
|
||||
print("Success")
|
||||
except Exception as e:
|
||||
print(e)
|
||||
print("Failed")
|
||||
|
||||
|
||||
def update_label_color():
|
||||
try:
|
||||
labels = Label.objects.filter(color="")
|
||||
updated_labels = []
|
||||
for label in labels:
|
||||
label.color = f"#{random.randint(0, 0xFFFFFF+1):06X}"
|
||||
updated_labels.append(label)
|
||||
|
||||
Label.objects.bulk_update(updated_labels, ["color"], batch_size=100)
|
||||
print("Success")
|
||||
except Exception as e:
|
||||
print(e)
|
||||
print("Failed")
|
||||
|
||||
|
||||
def create_slack_integration():
|
||||
try:
|
||||
_ = Integration.objects.create(provider="slack", network=2, title="Slack")
|
||||
print("Success")
|
||||
except Exception as e:
|
||||
print(e)
|
||||
print("Failed")
|
||||
|
||||
|
||||
def update_integration_verified():
|
||||
try:
|
||||
integrations = Integration.objects.all()
|
||||
updated_integrations = []
|
||||
for integration in integrations:
|
||||
integration.verified = True
|
||||
updated_integrations.append(integration)
|
||||
|
||||
Integration.objects.bulk_update(
|
||||
updated_integrations, ["verified"], batch_size=10
|
||||
)
|
||||
print("Success")
|
||||
except Exception as e:
|
||||
print(e)
|
||||
print("Failed")
|
||||
|
||||
|
||||
def update_start_date():
|
||||
try:
|
||||
issues = Issue.objects.filter(state__group__in=["started", "completed"])
|
||||
updated_issues = []
|
||||
for issue in issues:
|
||||
issue.start_date = issue.created_at.date()
|
||||
updated_issues.append(issue)
|
||||
Issue.objects.bulk_update(updated_issues, ["start_date"], batch_size=500)
|
||||
print("Success")
|
||||
except Exception as e:
|
||||
print(e)
|
||||
print("Failed")
|
||||
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"name": "plane-api",
|
||||
"version": "0.26.1",
|
||||
"license": "AGPL-3.0",
|
||||
"private": true,
|
||||
"description": "API server powering Plane's backend"
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class ApiConfig(AppConfig):
|
||||
name = "plane.api"
|
||||
@@ -1,18 +0,0 @@
|
||||
from .user import UserLiteSerializer
|
||||
from .workspace import WorkspaceLiteSerializer
|
||||
from .project import ProjectSerializer, ProjectLiteSerializer
|
||||
from .issue import (
|
||||
IssueSerializer,
|
||||
LabelSerializer,
|
||||
IssueLinkSerializer,
|
||||
IssueCommentSerializer,
|
||||
IssueAttachmentSerializer,
|
||||
IssueActivitySerializer,
|
||||
IssueExpandSerializer,
|
||||
IssueLiteSerializer,
|
||||
)
|
||||
from .state import StateLiteSerializer, StateSerializer
|
||||
from .cycle import CycleSerializer, CycleIssueSerializer, CycleLiteSerializer
|
||||
from .module import ModuleSerializer, ModuleIssueSerializer, ModuleLiteSerializer
|
||||
from .intake import IntakeIssueSerializer
|
||||
from .estimate import EstimatePointSerializer
|
||||
@@ -1,109 +0,0 @@
|
||||
# Third party imports
|
||||
from rest_framework import serializers
|
||||
|
||||
|
||||
class BaseSerializer(serializers.ModelSerializer):
|
||||
id = serializers.PrimaryKeyRelatedField(read_only=True)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
# If 'fields' is provided in the arguments, remove it and store it separately.
|
||||
# This is done so as not to pass this custom argument up to the superclass.
|
||||
fields = kwargs.pop("fields", [])
|
||||
self.expand = kwargs.pop("expand", []) or []
|
||||
|
||||
# Call the initialization of the superclass.
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
# If 'fields' was provided, filter the fields of the serializer accordingly.
|
||||
if fields:
|
||||
self.fields = self._filter_fields(fields=fields)
|
||||
|
||||
def _filter_fields(self, fields):
|
||||
"""
|
||||
Adjust the serializer's fields based on the provided 'fields' list.
|
||||
|
||||
:param fields: List or dictionary specifying which fields to include in the serializer.
|
||||
:return: The updated fields for the serializer.
|
||||
"""
|
||||
# Check each field_name in the provided fields.
|
||||
for field_name in fields:
|
||||
# If the field is a dictionary (indicating nested fields),
|
||||
# loop through its keys and values.
|
||||
if isinstance(field_name, dict):
|
||||
for key, value in field_name.items():
|
||||
# If the value of this nested field is a list,
|
||||
# perform a recursive filter on it.
|
||||
if isinstance(value, list):
|
||||
self._filter_fields(self.fields[key], value)
|
||||
|
||||
# Create a list to store allowed fields.
|
||||
allowed = []
|
||||
for item in fields:
|
||||
# If the item is a string, it directly represents a field's name.
|
||||
if isinstance(item, str):
|
||||
allowed.append(item)
|
||||
# If the item is a dictionary, it represents a nested field.
|
||||
# Add the key of this dictionary to the allowed list.
|
||||
elif isinstance(item, dict):
|
||||
allowed.append(list(item.keys())[0])
|
||||
|
||||
# Convert the current serializer's fields and the allowed fields to sets.
|
||||
existing = set(self.fields)
|
||||
allowed = set(allowed)
|
||||
|
||||
# Remove fields from the serializer that aren't in the 'allowed' list.
|
||||
for field_name in existing - allowed:
|
||||
self.fields.pop(field_name)
|
||||
|
||||
return self.fields
|
||||
|
||||
def to_representation(self, instance):
|
||||
response = super().to_representation(instance)
|
||||
|
||||
# Ensure 'expand' is iterable before processing
|
||||
if self.expand:
|
||||
for expand in self.expand:
|
||||
if expand in self.fields:
|
||||
# Import all the expandable serializers
|
||||
from . import (
|
||||
IssueSerializer,
|
||||
IssueLiteSerializer,
|
||||
ProjectLiteSerializer,
|
||||
StateLiteSerializer,
|
||||
UserLiteSerializer,
|
||||
WorkspaceLiteSerializer,
|
||||
EstimatePointSerializer,
|
||||
)
|
||||
|
||||
# Expansion mapper
|
||||
expansion = {
|
||||
"user": UserLiteSerializer,
|
||||
"workspace": WorkspaceLiteSerializer,
|
||||
"project": ProjectLiteSerializer,
|
||||
"default_assignee": UserLiteSerializer,
|
||||
"project_lead": UserLiteSerializer,
|
||||
"state": StateLiteSerializer,
|
||||
"created_by": UserLiteSerializer,
|
||||
"issue": IssueSerializer,
|
||||
"actor": UserLiteSerializer,
|
||||
"owned_by": UserLiteSerializer,
|
||||
"members": UserLiteSerializer,
|
||||
"parent": IssueLiteSerializer,
|
||||
"estimate_point": EstimatePointSerializer,
|
||||
}
|
||||
# Check if field in expansion then expand the field
|
||||
if expand in expansion:
|
||||
if isinstance(response.get(expand), list):
|
||||
exp_serializer = expansion[expand](
|
||||
getattr(instance, expand), many=True
|
||||
)
|
||||
else:
|
||||
exp_serializer = expansion[expand](
|
||||
getattr(instance, expand)
|
||||
)
|
||||
response[expand] = exp_serializer.data
|
||||
else:
|
||||
# You might need to handle this case differently
|
||||
response[expand] = getattr(instance, f"{expand}_id", None)
|
||||
|
||||
return response
|
||||
@@ -1,90 +0,0 @@
|
||||
# Third party imports
|
||||
import pytz
|
||||
from rest_framework import serializers
|
||||
|
||||
# Module imports
|
||||
from .base import BaseSerializer
|
||||
from plane.db.models import Cycle, CycleIssue
|
||||
from plane.utils.timezone_converter import convert_to_utc
|
||||
|
||||
|
||||
class CycleSerializer(BaseSerializer):
|
||||
total_issues = serializers.IntegerField(read_only=True)
|
||||
cancelled_issues = serializers.IntegerField(read_only=True)
|
||||
completed_issues = serializers.IntegerField(read_only=True)
|
||||
started_issues = serializers.IntegerField(read_only=True)
|
||||
unstarted_issues = serializers.IntegerField(read_only=True)
|
||||
backlog_issues = serializers.IntegerField(read_only=True)
|
||||
total_estimates = serializers.FloatField(read_only=True)
|
||||
completed_estimates = serializers.FloatField(read_only=True)
|
||||
started_estimates = serializers.FloatField(read_only=True)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
project = self.context.get("project")
|
||||
if project and project.timezone:
|
||||
project_timezone = pytz.timezone(project.timezone)
|
||||
self.fields["start_date"].timezone = project_timezone
|
||||
self.fields["end_date"].timezone = project_timezone
|
||||
|
||||
def validate(self, data):
|
||||
if (
|
||||
data.get("start_date", None) is not None
|
||||
and data.get("end_date", None) is not None
|
||||
and data.get("start_date", None) > data.get("end_date", None)
|
||||
):
|
||||
raise serializers.ValidationError("Start date cannot exceed end date")
|
||||
|
||||
if (
|
||||
data.get("start_date", None) is not None
|
||||
and data.get("end_date", None) is not None
|
||||
):
|
||||
project_id = self.initial_data.get("project_id") or (
|
||||
self.instance.project_id
|
||||
if self.instance and hasattr(self.instance, "project_id")
|
||||
else None
|
||||
)
|
||||
|
||||
if not project_id:
|
||||
raise serializers.ValidationError("Project ID is required")
|
||||
|
||||
data["start_date"] = convert_to_utc(
|
||||
date=str(data.get("start_date").date()),
|
||||
project_id=project_id,
|
||||
is_start_date=True,
|
||||
)
|
||||
data["end_date"] = convert_to_utc(
|
||||
date=str(data.get("end_date", None).date()),
|
||||
project_id=project_id,
|
||||
)
|
||||
return data
|
||||
|
||||
class Meta:
|
||||
model = Cycle
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"id",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"workspace",
|
||||
"project",
|
||||
"owned_by",
|
||||
"deleted_at",
|
||||
]
|
||||
|
||||
|
||||
class CycleIssueSerializer(BaseSerializer):
|
||||
sub_issues_count = serializers.IntegerField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = CycleIssue
|
||||
fields = "__all__"
|
||||
read_only_fields = ["workspace", "project", "cycle"]
|
||||
|
||||
|
||||
class CycleLiteSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = Cycle
|
||||
fields = "__all__"
|
||||
@@ -1,10 +0,0 @@
|
||||
# Module imports
|
||||
from plane.db.models import EstimatePoint
|
||||
from .base import BaseSerializer
|
||||
|
||||
|
||||
class EstimatePointSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = EstimatePoint
|
||||
fields = ["id", "value"]
|
||||
read_only_fields = fields
|
||||
@@ -1,24 +0,0 @@
|
||||
# Module improts
|
||||
from .base import BaseSerializer
|
||||
from .issue import IssueExpandSerializer
|
||||
from plane.db.models import IntakeIssue
|
||||
from rest_framework import serializers
|
||||
|
||||
|
||||
class IntakeIssueSerializer(BaseSerializer):
|
||||
issue_detail = IssueExpandSerializer(read_only=True, source="issue")
|
||||
inbox = serializers.UUIDField(source="intake.id", read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = IntakeIssue
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"id",
|
||||
"workspace",
|
||||
"project",
|
||||
"issue",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
]
|
||||
@@ -1,466 +0,0 @@
|
||||
# Django imports
|
||||
from django.utils import timezone
|
||||
from lxml import html
|
||||
from django.db import IntegrityError
|
||||
|
||||
# Third party imports
|
||||
from rest_framework import serializers
|
||||
|
||||
# Module imports
|
||||
from plane.db.models import (
|
||||
Issue,
|
||||
IssueType,
|
||||
IssueActivity,
|
||||
IssueAssignee,
|
||||
FileAsset,
|
||||
IssueComment,
|
||||
IssueLabel,
|
||||
IssueLink,
|
||||
Label,
|
||||
ProjectMember,
|
||||
State,
|
||||
User,
|
||||
)
|
||||
|
||||
from .base import BaseSerializer
|
||||
from .cycle import CycleLiteSerializer, CycleSerializer
|
||||
from .module import ModuleLiteSerializer, ModuleSerializer
|
||||
from .state import StateLiteSerializer
|
||||
from .user import UserLiteSerializer
|
||||
|
||||
# Django imports
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.core.validators import URLValidator
|
||||
|
||||
|
||||
class IssueSerializer(BaseSerializer):
|
||||
assignees = serializers.ListField(
|
||||
child=serializers.PrimaryKeyRelatedField(
|
||||
queryset=User.objects.values_list("id", flat=True)
|
||||
),
|
||||
write_only=True,
|
||||
required=False,
|
||||
)
|
||||
|
||||
labels = serializers.ListField(
|
||||
child=serializers.PrimaryKeyRelatedField(
|
||||
queryset=Label.objects.values_list("id", flat=True)
|
||||
),
|
||||
write_only=True,
|
||||
required=False,
|
||||
)
|
||||
type_id = serializers.PrimaryKeyRelatedField(
|
||||
source="type", queryset=IssueType.objects.all(), required=False, allow_null=True
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Issue
|
||||
read_only_fields = ["id", "workspace", "project", "updated_by", "updated_at"]
|
||||
exclude = ["description", "description_stripped"]
|
||||
|
||||
def validate(self, data):
|
||||
if (
|
||||
data.get("start_date", None) is not None
|
||||
and data.get("target_date", None) is not None
|
||||
and data.get("start_date", None) > data.get("target_date", None)
|
||||
):
|
||||
raise serializers.ValidationError("Start date cannot exceed target date")
|
||||
|
||||
try:
|
||||
if data.get("description_html", None) is not None:
|
||||
parsed = html.fromstring(data["description_html"])
|
||||
parsed_str = html.tostring(parsed, encoding="unicode")
|
||||
data["description_html"] = parsed_str
|
||||
|
||||
except Exception:
|
||||
raise serializers.ValidationError("Invalid HTML passed")
|
||||
|
||||
# Validate assignees are from project
|
||||
if data.get("assignees", []):
|
||||
data["assignees"] = ProjectMember.objects.filter(
|
||||
project_id=self.context.get("project_id"),
|
||||
is_active=True,
|
||||
role__gte=15,
|
||||
member_id__in=data["assignees"],
|
||||
).values_list("member_id", flat=True)
|
||||
|
||||
# Validate labels are from project
|
||||
if data.get("labels", []):
|
||||
data["labels"] = Label.objects.filter(
|
||||
project_id=self.context.get("project_id"), id__in=data["labels"]
|
||||
).values_list("id", flat=True)
|
||||
|
||||
# Check state is from the project only else raise validation error
|
||||
if (
|
||||
data.get("state")
|
||||
and not State.objects.filter(
|
||||
project_id=self.context.get("project_id"), pk=data.get("state").id
|
||||
).exists()
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
"State is not valid please pass a valid state_id"
|
||||
)
|
||||
|
||||
# Check parent issue is from workspace as it can be cross workspace
|
||||
if (
|
||||
data.get("parent")
|
||||
and not Issue.objects.filter(
|
||||
workspace_id=self.context.get("workspace_id"), pk=data.get("parent").id
|
||||
).exists()
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
"Parent is not valid issue_id please pass a valid issue_id"
|
||||
)
|
||||
|
||||
return data
|
||||
|
||||
def create(self, validated_data):
|
||||
assignees = validated_data.pop("assignees", None)
|
||||
labels = validated_data.pop("labels", None)
|
||||
|
||||
project_id = self.context["project_id"]
|
||||
workspace_id = self.context["workspace_id"]
|
||||
default_assignee_id = self.context["default_assignee_id"]
|
||||
|
||||
issue_type = validated_data.pop("type", None)
|
||||
|
||||
if not issue_type:
|
||||
# Get default issue type
|
||||
issue_type = IssueType.objects.filter(
|
||||
project_issue_types__project_id=project_id, is_default=True
|
||||
).first()
|
||||
issue_type = issue_type
|
||||
|
||||
issue = Issue.objects.create(
|
||||
**validated_data, project_id=project_id, type=issue_type
|
||||
)
|
||||
|
||||
# Issue Audit Users
|
||||
created_by_id = issue.created_by_id
|
||||
updated_by_id = issue.updated_by_id
|
||||
|
||||
if assignees is not None and len(assignees):
|
||||
try:
|
||||
IssueAssignee.objects.bulk_create(
|
||||
[
|
||||
IssueAssignee(
|
||||
assignee_id=assignee_id,
|
||||
issue=issue,
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
created_by_id=created_by_id,
|
||||
updated_by_id=updated_by_id,
|
||||
)
|
||||
for assignee_id in assignees
|
||||
],
|
||||
batch_size=10,
|
||||
)
|
||||
except IntegrityError:
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
# Then assign it to default assignee, if it is a valid assignee
|
||||
if (
|
||||
default_assignee_id is not None
|
||||
and ProjectMember.objects.filter(
|
||||
member_id=default_assignee_id,
|
||||
project_id=project_id,
|
||||
role__gte=15,
|
||||
is_active=True,
|
||||
).exists()
|
||||
):
|
||||
IssueAssignee.objects.create(
|
||||
assignee_id=default_assignee_id,
|
||||
issue=issue,
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
created_by_id=created_by_id,
|
||||
updated_by_id=updated_by_id,
|
||||
)
|
||||
except IntegrityError:
|
||||
pass
|
||||
|
||||
if labels is not None and len(labels):
|
||||
try:
|
||||
IssueLabel.objects.bulk_create(
|
||||
[
|
||||
IssueLabel(
|
||||
label_id=label_id,
|
||||
issue=issue,
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
created_by_id=created_by_id,
|
||||
updated_by_id=updated_by_id,
|
||||
)
|
||||
for label_id in labels
|
||||
],
|
||||
batch_size=10,
|
||||
)
|
||||
except IntegrityError:
|
||||
pass
|
||||
|
||||
return issue
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
assignees = validated_data.pop("assignees", None)
|
||||
labels = validated_data.pop("labels", None)
|
||||
|
||||
# Related models
|
||||
project_id = instance.project_id
|
||||
workspace_id = instance.workspace_id
|
||||
created_by_id = instance.created_by_id
|
||||
updated_by_id = instance.updated_by_id
|
||||
|
||||
if assignees is not None:
|
||||
IssueAssignee.objects.filter(issue=instance).delete()
|
||||
try:
|
||||
IssueAssignee.objects.bulk_create(
|
||||
[
|
||||
IssueAssignee(
|
||||
assignee_id=assignee_id,
|
||||
issue=instance,
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
created_by_id=created_by_id,
|
||||
updated_by_id=updated_by_id,
|
||||
)
|
||||
for assignee_id in assignees
|
||||
],
|
||||
batch_size=10,
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
except IntegrityError:
|
||||
pass
|
||||
|
||||
if labels is not None:
|
||||
IssueLabel.objects.filter(issue=instance).delete()
|
||||
try:
|
||||
IssueLabel.objects.bulk_create(
|
||||
[
|
||||
IssueLabel(
|
||||
label_id=label_id,
|
||||
issue=instance,
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
created_by_id=created_by_id,
|
||||
updated_by_id=updated_by_id,
|
||||
)
|
||||
for label_id in labels
|
||||
],
|
||||
batch_size=10,
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
except IntegrityError:
|
||||
pass
|
||||
|
||||
# Time updation occues even when other related models are updated
|
||||
instance.updated_at = timezone.now()
|
||||
return super().update(instance, validated_data)
|
||||
|
||||
def to_representation(self, instance):
|
||||
data = super().to_representation(instance)
|
||||
if "assignees" in self.fields:
|
||||
if "assignees" in self.expand:
|
||||
from .user import UserLiteSerializer
|
||||
|
||||
data["assignees"] = UserLiteSerializer(
|
||||
User.objects.filter(
|
||||
pk__in=IssueAssignee.objects.filter(issue=instance).values_list(
|
||||
"assignee_id", flat=True
|
||||
)
|
||||
),
|
||||
many=True,
|
||||
).data
|
||||
else:
|
||||
data["assignees"] = [
|
||||
str(assignee)
|
||||
for assignee in IssueAssignee.objects.filter(
|
||||
issue=instance
|
||||
).values_list("assignee_id", flat=True)
|
||||
]
|
||||
if "labels" in self.fields:
|
||||
if "labels" in self.expand:
|
||||
data["labels"] = LabelSerializer(
|
||||
Label.objects.filter(
|
||||
pk__in=IssueLabel.objects.filter(issue=instance).values_list(
|
||||
"label_id", flat=True
|
||||
)
|
||||
),
|
||||
many=True,
|
||||
).data
|
||||
else:
|
||||
data["labels"] = [
|
||||
str(label)
|
||||
for label in IssueLabel.objects.filter(issue=instance).values_list(
|
||||
"label_id", flat=True
|
||||
)
|
||||
]
|
||||
|
||||
return data
|
||||
|
||||
|
||||
class IssueLiteSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = Issue
|
||||
fields = ["id", "sequence_id", "project_id"]
|
||||
read_only_fields = fields
|
||||
|
||||
|
||||
class LabelSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = Label
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"id",
|
||||
"workspace",
|
||||
"project",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"deleted_at",
|
||||
]
|
||||
|
||||
|
||||
class IssueLinkSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = IssueLink
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"id",
|
||||
"workspace",
|
||||
"project",
|
||||
"issue",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
]
|
||||
|
||||
def validate_url(self, value):
|
||||
# Check URL format
|
||||
validate_url = URLValidator()
|
||||
try:
|
||||
validate_url(value)
|
||||
except ValidationError:
|
||||
raise serializers.ValidationError("Invalid URL format.")
|
||||
|
||||
# Check URL scheme
|
||||
if not value.startswith(("http://", "https://")):
|
||||
raise serializers.ValidationError("Invalid URL scheme.")
|
||||
|
||||
return value
|
||||
|
||||
# Validation if url already exists
|
||||
def create(self, validated_data):
|
||||
if IssueLink.objects.filter(
|
||||
url=validated_data.get("url"), issue_id=validated_data.get("issue_id")
|
||||
).exists():
|
||||
raise serializers.ValidationError(
|
||||
{"error": "URL already exists for this Issue"}
|
||||
)
|
||||
return IssueLink.objects.create(**validated_data)
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
if (
|
||||
IssueLink.objects.filter(
|
||||
url=validated_data.get("url"), issue_id=instance.issue_id
|
||||
)
|
||||
.exclude(pk=instance.id)
|
||||
.exists()
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
{"error": "URL already exists for this Issue"}
|
||||
)
|
||||
|
||||
return super().update(instance, validated_data)
|
||||
|
||||
|
||||
class IssueAttachmentSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = FileAsset
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"id",
|
||||
"workspace",
|
||||
"project",
|
||||
"issue",
|
||||
"updated_by",
|
||||
"updated_at",
|
||||
]
|
||||
|
||||
|
||||
class IssueCommentSerializer(BaseSerializer):
|
||||
is_member = serializers.BooleanField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = IssueComment
|
||||
read_only_fields = [
|
||||
"id",
|
||||
"workspace",
|
||||
"project",
|
||||
"issue",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
]
|
||||
exclude = ["comment_stripped", "comment_json"]
|
||||
|
||||
def validate(self, data):
|
||||
try:
|
||||
if data.get("comment_html", None) is not None:
|
||||
parsed = html.fromstring(data["comment_html"])
|
||||
parsed_str = html.tostring(parsed, encoding="unicode")
|
||||
data["comment_html"] = parsed_str
|
||||
|
||||
except Exception:
|
||||
raise serializers.ValidationError("Invalid HTML passed")
|
||||
return data
|
||||
|
||||
|
||||
class IssueActivitySerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = IssueActivity
|
||||
exclude = ["created_by", "updated_by"]
|
||||
|
||||
|
||||
class CycleIssueSerializer(BaseSerializer):
|
||||
cycle = CycleSerializer(read_only=True)
|
||||
|
||||
class Meta:
|
||||
fields = ["cycle"]
|
||||
|
||||
|
||||
class ModuleIssueSerializer(BaseSerializer):
|
||||
module = ModuleSerializer(read_only=True)
|
||||
|
||||
class Meta:
|
||||
fields = ["module"]
|
||||
|
||||
|
||||
class LabelLiteSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = Label
|
||||
fields = ["id", "name", "color"]
|
||||
|
||||
|
||||
class IssueExpandSerializer(BaseSerializer):
|
||||
cycle = CycleLiteSerializer(source="issue_cycle.cycle", read_only=True)
|
||||
module = ModuleLiteSerializer(source="issue_module.module", read_only=True)
|
||||
labels = LabelLiteSerializer(read_only=True, many=True)
|
||||
assignees = UserLiteSerializer(read_only=True, many=True)
|
||||
state = StateLiteSerializer(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Issue
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"id",
|
||||
"workspace",
|
||||
"project",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
]
|
||||
@@ -1,179 +0,0 @@
|
||||
# Third party imports
|
||||
from rest_framework import serializers
|
||||
|
||||
# Module imports
|
||||
from .base import BaseSerializer
|
||||
from plane.db.models import (
|
||||
User,
|
||||
Module,
|
||||
ModuleLink,
|
||||
ModuleMember,
|
||||
ModuleIssue,
|
||||
ProjectMember,
|
||||
)
|
||||
|
||||
|
||||
class ModuleSerializer(BaseSerializer):
|
||||
members = serializers.ListField(
|
||||
child=serializers.PrimaryKeyRelatedField(
|
||||
queryset=User.objects.values_list("id", flat=True)
|
||||
),
|
||||
write_only=True,
|
||||
required=False,
|
||||
)
|
||||
total_issues = serializers.IntegerField(read_only=True)
|
||||
cancelled_issues = serializers.IntegerField(read_only=True)
|
||||
completed_issues = serializers.IntegerField(read_only=True)
|
||||
started_issues = serializers.IntegerField(read_only=True)
|
||||
unstarted_issues = serializers.IntegerField(read_only=True)
|
||||
backlog_issues = serializers.IntegerField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Module
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"id",
|
||||
"workspace",
|
||||
"project",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"deleted_at",
|
||||
]
|
||||
|
||||
def to_representation(self, instance):
|
||||
data = super().to_representation(instance)
|
||||
data["members"] = [str(member.id) for member in instance.members.all()]
|
||||
return data
|
||||
|
||||
def validate(self, data):
|
||||
if (
|
||||
data.get("start_date", None) is not None
|
||||
and data.get("target_date", None) is not None
|
||||
and data.get("start_date", None) > data.get("target_date", None)
|
||||
):
|
||||
raise serializers.ValidationError("Start date cannot exceed target date")
|
||||
|
||||
if data.get("members", []):
|
||||
data["members"] = ProjectMember.objects.filter(
|
||||
project_id=self.context.get("project_id"), member_id__in=data["members"]
|
||||
).values_list("member_id", flat=True)
|
||||
|
||||
return data
|
||||
|
||||
def create(self, validated_data):
|
||||
members = validated_data.pop("members", None)
|
||||
|
||||
project_id = self.context["project_id"]
|
||||
workspace_id = self.context["workspace_id"]
|
||||
|
||||
module_name = validated_data.get("name")
|
||||
if module_name:
|
||||
# Lookup for the module name in the module table for that project
|
||||
if Module.objects.filter(name=module_name, project_id=project_id).exists():
|
||||
raise serializers.ValidationError(
|
||||
{"error": "Module with this name already exists"}
|
||||
)
|
||||
|
||||
module = Module.objects.create(**validated_data, project_id=project_id)
|
||||
if members is not None:
|
||||
ModuleMember.objects.bulk_create(
|
||||
[
|
||||
ModuleMember(
|
||||
module=module,
|
||||
member_id=str(member),
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
created_by=module.created_by,
|
||||
updated_by=module.updated_by,
|
||||
)
|
||||
for member in members
|
||||
],
|
||||
batch_size=10,
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
|
||||
return module
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
members = validated_data.pop("members", None)
|
||||
module_name = validated_data.get("name")
|
||||
if module_name:
|
||||
# Lookup for the module name in the module table for that project
|
||||
if (
|
||||
Module.objects.filter(name=module_name, project=instance.project)
|
||||
.exclude(id=instance.id)
|
||||
.exists()
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
{"error": "Module with this name already exists"}
|
||||
)
|
||||
|
||||
if members is not None:
|
||||
ModuleMember.objects.filter(module=instance).delete()
|
||||
ModuleMember.objects.bulk_create(
|
||||
[
|
||||
ModuleMember(
|
||||
module=instance,
|
||||
member_id=str(member),
|
||||
project=instance.project,
|
||||
workspace=instance.project.workspace,
|
||||
created_by=instance.created_by,
|
||||
updated_by=instance.updated_by,
|
||||
)
|
||||
for member in members
|
||||
],
|
||||
batch_size=10,
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
|
||||
return super().update(instance, validated_data)
|
||||
|
||||
|
||||
class ModuleIssueSerializer(BaseSerializer):
|
||||
sub_issues_count = serializers.IntegerField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = ModuleIssue
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"project",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"module",
|
||||
]
|
||||
|
||||
|
||||
class ModuleLinkSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = ModuleLink
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"project",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"module",
|
||||
]
|
||||
|
||||
# Validation if url already exists
|
||||
def create(self, validated_data):
|
||||
if ModuleLink.objects.filter(
|
||||
url=validated_data.get("url"), module_id=validated_data.get("module_id")
|
||||
).exists():
|
||||
raise serializers.ValidationError(
|
||||
{"error": "URL already exists for this Issue"}
|
||||
)
|
||||
return ModuleLink.objects.create(**validated_data)
|
||||
|
||||
|
||||
class ModuleLiteSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = Module
|
||||
fields = "__all__"
|
||||
@@ -1,98 +0,0 @@
|
||||
# Third party imports
|
||||
from rest_framework import serializers
|
||||
|
||||
# Module imports
|
||||
from plane.db.models import Project, ProjectIdentifier, WorkspaceMember
|
||||
|
||||
from .base import BaseSerializer
|
||||
|
||||
|
||||
class ProjectSerializer(BaseSerializer):
|
||||
total_members = serializers.IntegerField(read_only=True)
|
||||
total_cycles = serializers.IntegerField(read_only=True)
|
||||
total_modules = serializers.IntegerField(read_only=True)
|
||||
is_member = serializers.BooleanField(read_only=True)
|
||||
sort_order = serializers.FloatField(read_only=True)
|
||||
member_role = serializers.IntegerField(read_only=True)
|
||||
is_deployed = serializers.BooleanField(read_only=True)
|
||||
cover_image_url = serializers.CharField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Project
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"id",
|
||||
"emoji",
|
||||
"workspace",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"deleted_at",
|
||||
"cover_image_url",
|
||||
]
|
||||
|
||||
def validate(self, data):
|
||||
# Check project lead should be a member of the workspace
|
||||
if (
|
||||
data.get("project_lead", None) is not None
|
||||
and not WorkspaceMember.objects.filter(
|
||||
workspace_id=self.context["workspace_id"],
|
||||
member_id=data.get("project_lead"),
|
||||
).exists()
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
"Project lead should be a user in the workspace"
|
||||
)
|
||||
|
||||
# Check default assignee should be a member of the workspace
|
||||
if (
|
||||
data.get("default_assignee", None) is not None
|
||||
and not WorkspaceMember.objects.filter(
|
||||
workspace_id=self.context["workspace_id"],
|
||||
member_id=data.get("default_assignee"),
|
||||
).exists()
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
"Default assignee should be a user in the workspace"
|
||||
)
|
||||
|
||||
return data
|
||||
|
||||
def create(self, validated_data):
|
||||
identifier = validated_data.get("identifier", "").strip().upper()
|
||||
if identifier == "":
|
||||
raise serializers.ValidationError(detail="Project Identifier is required")
|
||||
|
||||
if ProjectIdentifier.objects.filter(
|
||||
name=identifier, workspace_id=self.context["workspace_id"]
|
||||
).exists():
|
||||
raise serializers.ValidationError(detail="Project Identifier is taken")
|
||||
|
||||
project = Project.objects.create(
|
||||
**validated_data, workspace_id=self.context["workspace_id"]
|
||||
)
|
||||
_ = ProjectIdentifier.objects.create(
|
||||
name=project.identifier,
|
||||
project=project,
|
||||
workspace_id=self.context["workspace_id"],
|
||||
)
|
||||
return project
|
||||
|
||||
|
||||
class ProjectLiteSerializer(BaseSerializer):
|
||||
cover_image_url = serializers.CharField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Project
|
||||
fields = [
|
||||
"id",
|
||||
"identifier",
|
||||
"name",
|
||||
"cover_image",
|
||||
"icon_prop",
|
||||
"emoji",
|
||||
"description",
|
||||
"cover_image_url",
|
||||
]
|
||||
read_only_fields = fields
|
||||
@@ -1,34 +0,0 @@
|
||||
# Module imports
|
||||
from .base import BaseSerializer
|
||||
from plane.db.models import State
|
||||
|
||||
|
||||
class StateSerializer(BaseSerializer):
|
||||
def validate(self, data):
|
||||
# If the default is being provided then make all other states default False
|
||||
if data.get("default", False):
|
||||
State.objects.filter(project_id=self.context.get("project_id")).update(
|
||||
default=False
|
||||
)
|
||||
return data
|
||||
|
||||
class Meta:
|
||||
model = State
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"id",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"workspace",
|
||||
"project",
|
||||
"deleted_at",
|
||||
]
|
||||
|
||||
|
||||
class StateLiteSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = State
|
||||
fields = ["id", "name", "color", "group"]
|
||||
read_only_fields = fields
|
||||
@@ -1,20 +0,0 @@
|
||||
# Module imports
|
||||
from plane.db.models import User
|
||||
|
||||
from .base import BaseSerializer
|
||||
|
||||
|
||||
class UserLiteSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = User
|
||||
fields = [
|
||||
"id",
|
||||
"first_name",
|
||||
"last_name",
|
||||
"email",
|
||||
"avatar",
|
||||
"avatar_url",
|
||||
"display_name",
|
||||
"email",
|
||||
]
|
||||
read_only_fields = fields
|
||||
@@ -1,12 +0,0 @@
|
||||
# Module imports
|
||||
from plane.db.models import Workspace
|
||||
from .base import BaseSerializer
|
||||
|
||||
|
||||
class WorkspaceLiteSerializer(BaseSerializer):
|
||||
"""Lite serializer with only required fields"""
|
||||
|
||||
class Meta:
|
||||
model = Workspace
|
||||
fields = ["name", "slug", "id"]
|
||||
read_only_fields = fields
|
||||
@@ -1,17 +0,0 @@
|
||||
from .project import urlpatterns as project_patterns
|
||||
from .state import urlpatterns as state_patterns
|
||||
from .issue import urlpatterns as issue_patterns
|
||||
from .cycle import urlpatterns as cycle_patterns
|
||||
from .module import urlpatterns as module_patterns
|
||||
from .intake import urlpatterns as intake_patterns
|
||||
from .member import urlpatterns as member_patterns
|
||||
|
||||
urlpatterns = [
|
||||
*project_patterns,
|
||||
*state_patterns,
|
||||
*issue_patterns,
|
||||
*cycle_patterns,
|
||||
*module_patterns,
|
||||
*intake_patterns,
|
||||
*member_patterns,
|
||||
]
|
||||
@@ -1,46 +0,0 @@
|
||||
from django.urls import path
|
||||
|
||||
from plane.api.views.cycle import (
|
||||
CycleAPIEndpoint,
|
||||
CycleIssueAPIEndpoint,
|
||||
TransferCycleIssueAPIEndpoint,
|
||||
CycleArchiveUnarchiveAPIEndpoint,
|
||||
)
|
||||
|
||||
urlpatterns = [
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/",
|
||||
CycleAPIEndpoint.as_view(),
|
||||
name="cycles",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:pk>/",
|
||||
CycleAPIEndpoint.as_view(),
|
||||
name="cycles",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/",
|
||||
CycleIssueAPIEndpoint.as_view(),
|
||||
name="cycle-issues",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/<uuid:issue_id>/",
|
||||
CycleIssueAPIEndpoint.as_view(),
|
||||
name="cycle-issues",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/transfer-issues/",
|
||||
TransferCycleIssueAPIEndpoint.as_view(),
|
||||
name="transfer-issues",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/archive/",
|
||||
CycleArchiveUnarchiveAPIEndpoint.as_view(),
|
||||
name="cycle-archive-unarchive",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-cycles/",
|
||||
CycleArchiveUnarchiveAPIEndpoint.as_view(),
|
||||
name="cycle-archive-unarchive",
|
||||
),
|
||||
]
|
||||
@@ -1,17 +0,0 @@
|
||||
from django.urls import path
|
||||
|
||||
from plane.api.views import IntakeIssueAPIEndpoint
|
||||
|
||||
|
||||
urlpatterns = [
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/intake-issues/",
|
||||
IntakeIssueAPIEndpoint.as_view(),
|
||||
name="intake-issue",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/intake-issues/<uuid:issue_id>/",
|
||||
IntakeIssueAPIEndpoint.as_view(),
|
||||
name="intake-issue",
|
||||
),
|
||||
]
|
||||
@@ -1,79 +0,0 @@
|
||||
from django.urls import path
|
||||
|
||||
from plane.api.views import (
|
||||
IssueAPIEndpoint,
|
||||
LabelAPIEndpoint,
|
||||
IssueLinkAPIEndpoint,
|
||||
IssueCommentAPIEndpoint,
|
||||
IssueActivityAPIEndpoint,
|
||||
WorkspaceIssueAPIEndpoint,
|
||||
IssueAttachmentEndpoint,
|
||||
)
|
||||
|
||||
urlpatterns = [
|
||||
path(
|
||||
"workspaces/<str:slug>/issues/<str:project__identifier>-<str:issue__identifier>/",
|
||||
WorkspaceIssueAPIEndpoint.as_view(),
|
||||
name="issue-by-identifier",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
|
||||
IssueAPIEndpoint.as_view(),
|
||||
name="issue",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:pk>/",
|
||||
IssueAPIEndpoint.as_view(),
|
||||
name="issue",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/labels/",
|
||||
LabelAPIEndpoint.as_view(),
|
||||
name="label",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/labels/<uuid:pk>/",
|
||||
LabelAPIEndpoint.as_view(),
|
||||
name="label",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/links/",
|
||||
IssueLinkAPIEndpoint.as_view(),
|
||||
name="link",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/links/<uuid:pk>/",
|
||||
IssueLinkAPIEndpoint.as_view(),
|
||||
name="link",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
|
||||
IssueCommentAPIEndpoint.as_view(),
|
||||
name="comment",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
|
||||
IssueCommentAPIEndpoint.as_view(),
|
||||
name="comment",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/activities/",
|
||||
IssueActivityAPIEndpoint.as_view(),
|
||||
name="activity",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/activities/<uuid:pk>/",
|
||||
IssueActivityAPIEndpoint.as_view(),
|
||||
name="activity",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/",
|
||||
IssueAttachmentEndpoint.as_view(),
|
||||
name="attachment",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/<uuid:pk>/",
|
||||
IssueAttachmentEndpoint.as_view(),
|
||||
name="issue-attachment",
|
||||
),
|
||||
]
|
||||
@@ -1,11 +0,0 @@
|
||||
from django.urls import path
|
||||
|
||||
from plane.api.views import ProjectMemberAPIEndpoint
|
||||
|
||||
urlpatterns = [
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<str:project_id>/members/",
|
||||
ProjectMemberAPIEndpoint.as_view(),
|
||||
name="users",
|
||||
)
|
||||
]
|
||||
@@ -1,40 +0,0 @@
|
||||
from django.urls import path
|
||||
|
||||
from plane.api.views import (
|
||||
ModuleAPIEndpoint,
|
||||
ModuleIssueAPIEndpoint,
|
||||
ModuleArchiveUnarchiveAPIEndpoint,
|
||||
)
|
||||
|
||||
urlpatterns = [
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/",
|
||||
ModuleAPIEndpoint.as_view(),
|
||||
name="modules",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:pk>/",
|
||||
ModuleAPIEndpoint.as_view(),
|
||||
name="modules",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/",
|
||||
ModuleIssueAPIEndpoint.as_view(),
|
||||
name="module-issues",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/<uuid:issue_id>/",
|
||||
ModuleIssueAPIEndpoint.as_view(),
|
||||
name="module-issues",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:pk>/archive/",
|
||||
ModuleArchiveUnarchiveAPIEndpoint.as_view(),
|
||||
name="module-archive-unarchive",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-modules/",
|
||||
ModuleArchiveUnarchiveAPIEndpoint.as_view(),
|
||||
name="module-archive-unarchive",
|
||||
),
|
||||
]
|
||||
@@ -1,19 +0,0 @@
|
||||
from django.urls import path
|
||||
|
||||
from plane.api.views import ProjectAPIEndpoint, ProjectArchiveUnarchiveAPIEndpoint
|
||||
|
||||
urlpatterns = [
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/", ProjectAPIEndpoint.as_view(), name="project"
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:pk>/",
|
||||
ProjectAPIEndpoint.as_view(),
|
||||
name="project",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/archive/",
|
||||
ProjectArchiveUnarchiveAPIEndpoint.as_view(),
|
||||
name="project-archive-unarchive",
|
||||
),
|
||||
]
|
||||
@@ -1,16 +0,0 @@
|
||||
from django.urls import path
|
||||
|
||||
from plane.api.views import StateAPIEndpoint
|
||||
|
||||
urlpatterns = [
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/states/",
|
||||
StateAPIEndpoint.as_view(),
|
||||
name="states",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/states/<uuid:state_id>/",
|
||||
StateAPIEndpoint.as_view(),
|
||||
name="states",
|
||||
),
|
||||
]
|
||||
@@ -1,30 +0,0 @@
|
||||
from .project import ProjectAPIEndpoint, ProjectArchiveUnarchiveAPIEndpoint
|
||||
|
||||
from .state import StateAPIEndpoint
|
||||
|
||||
from .issue import (
|
||||
WorkspaceIssueAPIEndpoint,
|
||||
IssueAPIEndpoint,
|
||||
LabelAPIEndpoint,
|
||||
IssueLinkAPIEndpoint,
|
||||
IssueCommentAPIEndpoint,
|
||||
IssueActivityAPIEndpoint,
|
||||
IssueAttachmentEndpoint,
|
||||
)
|
||||
|
||||
from .cycle import (
|
||||
CycleAPIEndpoint,
|
||||
CycleIssueAPIEndpoint,
|
||||
TransferCycleIssueAPIEndpoint,
|
||||
CycleArchiveUnarchiveAPIEndpoint,
|
||||
)
|
||||
|
||||
from .module import (
|
||||
ModuleAPIEndpoint,
|
||||
ModuleIssueAPIEndpoint,
|
||||
ModuleArchiveUnarchiveAPIEndpoint,
|
||||
)
|
||||
|
||||
from .member import ProjectMemberAPIEndpoint
|
||||
|
||||
from .intake import IntakeIssueAPIEndpoint
|
||||
@@ -1,159 +0,0 @@
# Python imports
import zoneinfo

# Django imports
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db import IntegrityError
from django.urls import resolve
from django.utils import timezone
from plane.db.models.api import APIToken
from rest_framework import status
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response

# Third party imports
from rest_framework.views import APIView

# Module imports
from plane.api.middleware.api_authentication import APIKeyAuthentication
from plane.api.rate_limit import ApiKeyRateThrottle, ServiceTokenRateThrottle
from plane.utils.exception_logger import log_exception
from plane.utils.paginator import BasePaginator


class TimezoneMixin:
    """
    This enables timezone conversion according
    to the user's configured timezone.
    """

    def initial(self, request, *args, **kwargs):
        super().initial(request, *args, **kwargs)
        if request.user.is_authenticated:
            timezone.activate(zoneinfo.ZoneInfo(request.user.user_timezone))
        else:
            timezone.deactivate()


class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
    authentication_classes = [APIKeyAuthentication]

    permission_classes = [IsAuthenticated]

    def filter_queryset(self, queryset):
        for backend in list(self.filter_backends):
            queryset = backend().filter_queryset(self.request, queryset, self)
        return queryset

    def get_throttles(self):
        throttle_classes = []
        api_key = self.request.headers.get("X-Api-Key")

        if api_key:
            service_token = APIToken.objects.filter(
                token=api_key, is_service=True
            ).first()

            if service_token:
                throttle_classes.append(ServiceTokenRateThrottle())
                return throttle_classes

            throttle_classes.append(ApiKeyRateThrottle())

        return throttle_classes

    def handle_exception(self, exc):
        """
        Handle any exception that occurs by returning an appropriate response,
        or re-raising the error.
        """
        try:
            response = super().handle_exception(exc)
            return response
        except Exception as e:
            if isinstance(e, IntegrityError):
                return Response(
                    {"error": "The payload is not valid"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            if isinstance(e, ValidationError):
                return Response(
                    {"error": "Please provide valid detail"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            if isinstance(e, ObjectDoesNotExist):
                return Response(
                    {"error": "The requested resource does not exist."},
                    status=status.HTTP_404_NOT_FOUND,
                )

            if isinstance(e, KeyError):
                return Response(
                    {"error": "The required key does not exist."},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            log_exception(e)
            return Response(
                {"error": "Something went wrong, please try again later"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )

    def dispatch(self, request, *args, **kwargs):
        try:
            response = super().dispatch(request, *args, **kwargs)
            if settings.DEBUG:
                from django.db import connection

                print(
                    f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
                )
            return response
        except Exception as exc:
            response = self.handle_exception(exc)
            # Return the handled response rather than the raw exception object
            return response

    def finalize_response(self, request, response, *args, **kwargs):
        # Call super to get the default response
        response = super().finalize_response(request, response, *args, **kwargs)

        # Add custom headers if they exist in the request META
        ratelimit_remaining = request.META.get("X-RateLimit-Remaining")
        if ratelimit_remaining is not None:
            response["X-RateLimit-Remaining"] = ratelimit_remaining

        ratelimit_reset = request.META.get("X-RateLimit-Reset")
        if ratelimit_reset is not None:
            response["X-RateLimit-Reset"] = ratelimit_reset

        return response

    @property
    def workspace_slug(self):
        return self.kwargs.get("slug", None)

    @property
    def project_id(self):
        project_id = self.kwargs.get("project_id", None)
        if project_id:
            return project_id

        if resolve(self.request.path_info).url_name == "project":
            return self.kwargs.get("pk", None)

    @property
    def fields(self):
        fields = [
            field for field in self.request.GET.get("fields", "").split(",") if field
        ]
        return fields if fields else None

    @property
    def expand(self):
        expand = [
            expand for expand in self.request.GET.get("expand", "").split(",") if expand
        ]
        return expand if expand else None
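The `fields` and `expand` properties above reduce to a small comma-separated query-parameter parser; the following standalone sketch (not from this diff) shows the same logic so it can be tried outside Django.

# Minimal sketch, not part of the diff: the same comma-separated parsing that the
# `fields` and `expand` properties perform, shown standalone.
def parse_csv_param(raw):
    values = [value for value in raw.split(",") if value]
    return values if values else None

print(parse_csv_param("name,priority,"))  # ['name', 'priority']
print(parse_csv_param(""))                # None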
File diff suppressed because it is too large.
@@ -1,358 +0,0 @@
# Python imports
import json

# Django imports
from django.core.serializers.json import DjangoJSONEncoder
from django.utils import timezone
from django.db.models import Q, Value, UUIDField
from django.db.models.functions import Coalesce
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField

# Third party imports
from rest_framework import status
from rest_framework.response import Response

# Module imports
from plane.api.serializers import IntakeIssueSerializer, IssueSerializer
from plane.app.permissions import ProjectLitePermission
from plane.bgtasks.issue_activities_task import issue_activity
from plane.db.models import Intake, IntakeIssue, Issue, Project, ProjectMember, State
from plane.utils.host import base_host
from .base import BaseAPIView
from plane.db.models.intake import SourceType


class IntakeIssueAPIEndpoint(BaseAPIView):
    """
    This viewset automatically provides `list`, `create`, `retrieve`,
    `update` and `destroy` actions related to intake issues.
    """

    permission_classes = [ProjectLitePermission]

    serializer_class = IntakeIssueSerializer
    model = IntakeIssue

    filterset_fields = ["status"]

    def get_queryset(self):
        intake = Intake.objects.filter(
            workspace__slug=self.kwargs.get("slug"),
            project_id=self.kwargs.get("project_id"),
        ).first()

        project = Project.objects.get(
            workspace__slug=self.kwargs.get("slug"), pk=self.kwargs.get("project_id")
        )

        if intake is None and not project.intake_view:
            return IntakeIssue.objects.none()

        return (
            IntakeIssue.objects.filter(
                Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
                workspace__slug=self.kwargs.get("slug"),
                project_id=self.kwargs.get("project_id"),
                intake_id=intake.id,
            )
            .select_related("issue", "workspace", "project")
            .order_by(self.kwargs.get("order_by", "-created_at"))
        )

    def get(self, request, slug, project_id, issue_id=None):
        if issue_id:
            intake_issue_queryset = self.get_queryset().get(issue_id=issue_id)
            intake_issue_data = IntakeIssueSerializer(
                intake_issue_queryset, fields=self.fields, expand=self.expand
            ).data
            return Response(intake_issue_data, status=status.HTTP_200_OK)
        issue_queryset = self.get_queryset()
        return self.paginate(
            request=request,
            queryset=(issue_queryset),
            on_results=lambda intake_issues: IntakeIssueSerializer(
                intake_issues, many=True, fields=self.fields, expand=self.expand
            ).data,
        )

    def post(self, request, slug, project_id):
        if not request.data.get("issue", {}).get("name", False):
            return Response(
                {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
            )

        intake = Intake.objects.filter(
            workspace__slug=slug, project_id=project_id
        ).first()

        project = Project.objects.get(workspace__slug=slug, pk=project_id)

        # Intake view
        if intake is None and not project.intake_view:
            return Response(
                {
                    "error": "Intake is not enabled for this project. Enable it through the project's API."
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Check for valid priority
        if request.data.get("issue", {}).get("priority", "none") not in [
            "low",
            "medium",
            "high",
            "urgent",
            "none",
        ]:
            return Response(
                {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
            )

        # Create an issue
        issue = Issue.objects.create(
            name=request.data.get("issue", {}).get("name"),
            description=request.data.get("issue", {}).get("description", {}),
            description_html=request.data.get("issue", {}).get(
                "description_html", "<p></p>"
            ),
            priority=request.data.get("issue", {}).get("priority", "none"),
            project_id=project_id,
        )

        # Create an intake issue
        intake_issue = IntakeIssue.objects.create(
            intake_id=intake.id,
            project_id=project_id,
            issue=issue,
            source=SourceType.IN_APP,
        )
        # Create an issue activity
        issue_activity.delay(
            type="issue.activity.created",
            requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
            actor_id=str(request.user.id),
            issue_id=str(issue.id),
            project_id=str(project_id),
            current_instance=None,
            epoch=int(timezone.now().timestamp()),
            intake=str(intake_issue.id),
        )

        serializer = IntakeIssueSerializer(intake_issue)
        return Response(serializer.data, status=status.HTTP_200_OK)

    def patch(self, request, slug, project_id, issue_id):
        intake = Intake.objects.filter(
            workspace__slug=slug, project_id=project_id
        ).first()

        project = Project.objects.get(workspace__slug=slug, pk=project_id)

        # Intake view
        if intake is None and not project.intake_view:
            return Response(
                {
                    "error": "Intake is not enabled for this project. Enable it through the project's API."
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Get the intake issue
        intake_issue = IntakeIssue.objects.get(
            issue_id=issue_id,
            workspace__slug=slug,
            project_id=project_id,
            intake_id=intake.id,
        )

        # Get the project member
        project_member = ProjectMember.objects.get(
            workspace__slug=slug,
            project_id=project_id,
            member=request.user,
            is_active=True,
        )

        # Only project members, admins, and the creating user can access this endpoint
        if project_member.role <= 5 and str(intake_issue.created_by_id) != str(
            request.user.id
        ):
            return Response(
                {"error": "You cannot edit intake issues"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Get issue data
        issue_data = request.data.pop("issue", False)

        if bool(issue_data):
            issue = Issue.objects.annotate(
                label_ids=Coalesce(
                    ArrayAgg(
                        "labels__id",
                        distinct=True,
                        filter=Q(
                            ~Q(labels__id__isnull=True)
                            & Q(label_issue__deleted_at__isnull=True)
                        ),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                assignee_ids=Coalesce(
                    ArrayAgg(
                        "assignees__id",
                        distinct=True,
                        filter=Q(
                            ~Q(assignees__id__isnull=True)
                            & Q(assignees__member_project__is_active=True)
                            & Q(issue_assignee__deleted_at__isnull=True)
                        ),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            ).get(pk=issue_id, workspace__slug=slug, project_id=project_id)
            # Only allow guests to edit name and description
            if project_member.role <= 5:
                issue_data = {
                    "name": issue_data.get("name", issue.name),
                    "description_html": issue_data.get(
                        "description_html", issue.description_html
                    ),
                    "description": issue_data.get("description", issue.description),
                }

            issue_serializer = IssueSerializer(issue, data=issue_data, partial=True)

            if issue_serializer.is_valid():
                current_instance = issue
                # Log all the updates
                requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
                if issue is not None:
                    issue_activity.delay(
                        type="issue.activity.updated",
                        requested_data=requested_data,
                        actor_id=str(request.user.id),
                        issue_id=str(issue_id),
                        project_id=str(project_id),
                        current_instance=json.dumps(
                            IssueSerializer(current_instance).data,
                            cls=DjangoJSONEncoder,
                        ),
                        epoch=int(timezone.now().timestamp()),
                        intake=(intake_issue.id),
                    )
                issue_serializer.save()
            else:
                return Response(
                    issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST
                )

        # Only project admins and members can edit intake issue attributes
        if project_member.role > 15:
            serializer = IntakeIssueSerializer(
                intake_issue, data=request.data, partial=True
            )
            current_instance = json.dumps(
                IntakeIssueSerializer(intake_issue).data, cls=DjangoJSONEncoder
            )

            if serializer.is_valid():
                serializer.save()
                # Update the issue state if the issue is rejected or marked as duplicate
                if serializer.data["status"] in [-1, 2]:
                    issue = Issue.objects.get(
                        pk=issue_id, workspace__slug=slug, project_id=project_id
                    )
                    state = State.objects.filter(
                        group="cancelled", workspace__slug=slug, project_id=project_id
                    ).first()
                    if state is not None:
                        issue.state = state
                        issue.save()

                # Update the issue state if it is accepted
                if serializer.data["status"] in [1]:
                    issue = Issue.objects.get(
                        pk=issue_id, workspace__slug=slug, project_id=project_id
                    )

                    # Update the issue state only if it is in triage state
                    if issue.state.is_triage:
                        # Move to default state
                        state = State.objects.filter(
                            workspace__slug=slug, project_id=project_id, default=True
                        ).first()
                        if state is not None:
                            issue.state = state
                            issue.save()

                # Create an activity for the status change
                issue_activity.delay(
                    type="intake.activity.created",
                    requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
                    actor_id=str(request.user.id),
                    issue_id=str(issue_id),
                    project_id=str(project_id),
                    current_instance=current_instance,
                    epoch=int(timezone.now().timestamp()),
                    notification=False,
                    origin=base_host(request=request, is_app=True),
                    intake=str(intake_issue.id),
                )

                return Response(serializer.data, status=status.HTTP_200_OK)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        else:
            return Response(
                IntakeIssueSerializer(intake_issue).data, status=status.HTTP_200_OK
            )

    def delete(self, request, slug, project_id, issue_id):
        intake = Intake.objects.filter(
            workspace__slug=slug, project_id=project_id
        ).first()

        project = Project.objects.get(workspace__slug=slug, pk=project_id)

        # Intake view
        if intake is None and not project.intake_view:
            return Response(
                {
                    "error": "Intake is not enabled for this project. Enable it through the project's API."
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Get the intake issue
        intake_issue = IntakeIssue.objects.get(
            issue_id=issue_id,
            workspace__slug=slug,
            project_id=project_id,
            intake_id=intake.id,
        )

        # Check the issue status
        if intake_issue.status in [-2, -1, 0, 2]:
            # Delete the issue as well
            issue = Issue.objects.filter(
                workspace__slug=slug, project_id=project_id, pk=issue_id
            ).first()
            if issue.created_by_id != request.user.id and (
                not ProjectMember.objects.filter(
                    workspace__slug=slug,
                    member=request.user,
                    role=20,
                    project_id=project_id,
                    is_active=True,
                ).exists()
            ):
                return Response(
                    {"error": "Only admin or creator can delete the issue"},
                    status=status.HTTP_403_FORBIDDEN,
                )
            issue.delete()

        intake_issue.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
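For orientation, here is a client-side sketch (not from this diff) of creating an intake issue over HTTP against the endpoint above; the host, the /api/v1/ prefix, the intake-issues path segment, the workspace slug, the project UUID, and the API key are all assumptions or placeholders.

# Sketch only: creating an intake issue through the API. Every value below is a
# placeholder, and the route prefix is an assumption about how the URLs are mounted.
import requests

resp = requests.post(
    "https://plane.example.com/api/v1/workspaces/acme/projects/<project-uuid>/intake-issues/",
    headers={"X-Api-Key": "<api-token>"},  # header name taken from BaseAPIView.get_throttles
    json={"issue": {"name": "Bug report from intake", "priority": "high"}},
)
print(resp.status_code, resp.json())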
File diff suppressed because it is too large.
Some files were not shown because too many files have changed in this diff.