Mirror of https://github.com/makeplane/plane (synced 2025-08-07 19:59:33 +00:00)

Compare commits
308 commits: feat/confl...fix/favori
SHA1:

c809538853, 14ec8215e7, 34820eec7a, 93e6c3b6e0, 8f8a97589d, 3a5c77e8a4, 79fbcaa2b2, 76983a57e9,
e9b1151702, f4f5e5a0d3, f55c135052, 8924e303da, c89fe9a313, b381331b75, ee76cb1dc7, daaa04c6ea,
67f2e2fdb2, 18df1530c1, dd3df20319, 569b592711, f75df83ca1, 8415df4cf3, 3c684ecab7, 0b01d3e88d,
889393e1d1, 6fa45d8723, 88533933b4, fffa8648bb, 1f8f6d1b26, cce7bddbcc, 518327e380, 6bb534dabc,
dc2e293058, 1adfb4dbe4, f2af5f0653, e3143ff00b, 7b82d1c62f, 3c2aec2776, 35e58e9ec7, ba9d9fd5eb,
040ee4b256, f48bc5a876, 10e9122c1d, d5cbe3283b, ae931f8172, a8c6483c60, 9c761a614f, adf88a0f13,
5d2983d027, 8339daa3ee, 4a9e09a54a, 2c609670c8, dfcba4dfc1, d0e68cdcfb, 43103a1445, 1c155f6cbe,
1707f4f282, c2c2ad0d7a, 1bf8f82ccb, 3bdd91e577, 1f9c7a4b67, d1828c9496, 3f87d8b99d, aba6e603a3,
b4f2176ffa, 4d978c1a8c, 58f203dd38, ca088a464f, 0d6e581789, c92129ef41, d22b633d50, a8b2bcc838,
78481d45d4, 3a6d3d4e82, 66c2cbe7d6, f5027f4268, 31fe9a1a02, 2978593c63, 8a05cd442c, c6cdc12165,
7b6a2343cb, 66aedafe8a, 7af9c7bc33, 0839666d81, 68a211d00e, 3545d94025, 17e46c812a, 73455c8040,
9c1c0ed166, ae45ff158a, c6909604b1, b95d7716e2, 8577a56068, 2ee6cd20d8, 8771c80c9b, 2ad1047323,
1956da2b90, eca79f33b6, 8f9b568a65, a6d111f66d, f1f7fa907a, b4feaf973a, 39a607ac0a, d3c3d3c5ab,
065c9779bb, cb21dcbcef, e7948eabf2, c2b5464e40, e055abb711, 44a0ff5c67, 075b8efa99, f27c25821c,
aade07b37a, 8107045d8c, 4ce255a872, a8c1b8cdef, 78dd15a801, 2d434f0b9c, 209b700fd9, 39e3c28ad8,
cfc70622d6, 281948c1ce, 2554110397, 482b363045, fff27c60e4, 474d7ef3c0, a7ecfade98, 996192b9bf,
4cb02a9270, 85719b9a12, 0b1f9f0e5b, d042dac042, f2733ab4df, 5464e62a03, e4d6e5e1af, cd85a9fe09,
6ade86f89d, 65caaa14cd, 0e92cae05f, 9523799f34, f5f3c4915f, 08d9e95a86, 22671ec8a7, 56331a7b55,
33d6a8d233, 4c353b6eeb, 0cc5a5357b, e758e08785, 890888a274, f7de9a3497, 830d1c0b5a, 4b0946e093,
1a26768291, c93b826c48, ce89c7dcff, f06095f120, dd3b0f6a3f, 24973c1386, 15b0a448ee, 4d484577b5,
2d78f6fd22, 77694ee8ba, ac8e588ac3, 2136872351, a90724516b, 31f67e189d, c6db050443, f9a3778c7f,
ec1662cbd6, 7986a28ca2, cd540e9641, 676ec7e396, 6b12c78cea, f617937542, 988201d729, 6c2b28df91,
53e5d4b40c, fd61079c8b, 7767be2e21, a623456e63, fb586c58d2, fb46249ccf, 0e4ce2baa5, 4e815c0fed,
1cd55cd95b, d8d476463b, fc2585bf64, 6dcbea6d14, 12c24ad255, 387dbd89f5, b7d792ed07, 54a5e5e761,
837f09ed90, 38f8aa90c1, c75091ca3a, 61ce055cb3, 977b47d35f, 509c258b07, 3cfb0ac518, 156f1011f3,
a36d1a753e, 9a927ded84, 72f00e378d, d3ec1aa422, dda19b0a3f, 2b570da890, 5918607171, f1496e3144,
9717497b4e, 2f8c8ac40f, 734e920e08, 82fa1347d1, a7aa5c2ba7, 825b2f26bf, af51992eba, 8f59a36bda,
5ad0114aac, 095639b976, db722d580f, 0363057d9c, 3dc933f0e8, fc33238d89, 86464c1d6f, cc479f39a7,
b50df9ef99, c637639a3e, 12401c54cc, 1201a4245e, fc15ca5565, 4e8b7e6dbb, b0bc818362, c8491a13b3,
83587c2c6b, d9d62c2d5a, b591203da6, 78e0405971, 26040144fc, 0fd36257d7, c217185b07, a5628c4ce1,
764e08140c, 26be5dac14, 35f78e8cfb, 03f7685f8b, d62654e1d5, 6e379b93b4, e78263e01f, 936c21d65e,
e824c37f36, 830d4045be, 439d678763, f3347c5430, fa2cfbbb1b, 184c3332eb, c2150687a6, f2694e0be4,
ab2e96a915, 7d4bb3e12b, 96563b438e, 3b2af2d028, 626464513d, 209dc57307, 8d5d0422e9, 1e1a912654,
589576257e, b5a2e5c727, 761fbe3ffb, d1ec83039c, 1a37c1542d, 90339b1c62, ff4de9ac11, 1f9f821543,
dbd7756163, aa6ad4d712, 9a255944e4, 82661589fb, adaf3b15de, 67784b45fd, eda1599c0d, 4452630b33,
141f7409ef, 9e97aa20c2, 4e97fcd776, a3a1e9cf9e, f087af3946, bafe3f9c45, 99184371f7, c919435598,
635efeab7b, 711494b72e, 245962d5a5, c5cd823aaa, 981a8e93ba, 0ce17ff212, 7da5443808, 535a27309e,
3eda3845fa, b6cf177630, 04eca3c9aa, e538bfad1f, 7c4c777c99, 08c4027e77, a554d87531, dcbd974ac5,
1c8e709870, 716a35779c, ce9d541981, 3f7516fa9b
2 changes: .github/ISSUE_TEMPLATE/--bug-report.yaml (vendored)

@@ -2,7 +2,7 @@ name: Bug report
 description: Create a bug report to help us improve Plane
 title: "[bug]: "
 labels: [🐛bug]
-assignees: [srinivaspendem, pushya22]
+assignees: [vihar, pushya22]
 body:
   - type: markdown
     attributes:
2 changes: .github/ISSUE_TEMPLATE/--feature-request.yaml (vendored)

@@ -2,7 +2,7 @@ name: Feature request
 description: Suggest a feature to improve Plane
 title: "[feature]: "
 labels: [✨feature]
-assignees: [srinivaspendem, pushya22]
+assignees: [vihar, pushya22]
 body:
   - type: markdown
     attributes:
102 changes: .github/workflows/build-aio-base.yml (vendored)

@@ -2,6 +2,11 @@ name: Build AIO Base Image

 on:
   workflow_dispatch:
+    inputs:
+      base_tag_name:
+        description: 'Base Tag Name'
+        required: false
+        default: ''

 env:
   TARGET_BRANCH: ${{ github.ref_name }}

@@ -16,37 +21,46 @@ jobs:
       gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
       gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
       gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
       build_base: ${{ steps.changed_files.outputs.base_any_changed }}
+      image_tag: ${{ steps.set_env_variables.outputs.IMAGE_TAG }}

     steps:
       - id: set_env_variables
         name: Set Environment Variables
         run: |
-          echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
-          echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
-          echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
-          echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT
           echo "TARGET_BRANCH=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT
+
+          if [ "${{ github.event.inputs.base_tag_name }}" != "" ]; then
+            echo "IMAGE_TAG=${{ github.event.inputs.base_tag_name }}" >> $GITHUB_OUTPUT
+          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
+            echo "IMAGE_TAG=latest" >> $GITHUB_OUTPUT
+          elif [ "${{ env.TARGET_BRANCH }}" == "preview" ]; then
+            echo "IMAGE_TAG=preview" >> $GITHUB_OUTPUT
+          else
+            echo "IMAGE_TAG=develop" >> $GITHUB_OUTPUT
+          fi
+
+          if [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
+            echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
+            echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
+            echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
+            echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT
+          else
+            echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
+            echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
+            echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
+            echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
+          fi

       - id: checkout_files
         name: Checkout Files
         uses: actions/checkout@v4

       - name: Get changed files
         id: changed_files
         uses: tj-actions/changed-files@v42
         with:
           files_yaml: |
             base:
               - aio/Dockerfile.base

-  base_build_push:
-    if: ${{ needs.base_build_setup.outputs.build_base == 'true' || github.event_name == 'workflow_dispatch' || needs.base_build_setup.outputs.gh_branch_name == 'master' }}
+  full_base_build_push:
     runs-on: ubuntu-latest
     needs: [base_build_setup]
     env:
-      BASE_IMG_TAG: makeplane/plane-aio-base:${{ needs.base_build_setup.outputs.gh_branch_name }}
-      TARGET_BRANCH: ${{ needs.base_build_setup.outputs.gh_branch_name }}
+      BASE_IMG_TAG: makeplane/plane-aio-base:full-${{ needs.base_build_setup.outputs.image_tag }}
       BUILDX_DRIVER: ${{ needs.base_build_setup.outputs.gh_buildx_driver }}
       BUILDX_VERSION: ${{ needs.base_build_setup.outputs.gh_buildx_version }}
       BUILDX_PLATFORMS: ${{ needs.base_build_setup.outputs.gh_buildx_platforms }}

@@ -55,15 +69,6 @@ jobs:
       - name: Check out the repo
         uses: actions/checkout@v4

-      - name: Set Docker Tag
-        run: |
-          if [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
-            TAG=makeplane/plane-aio-base:latest
-          else
-            TAG=${{ env.BASE_IMG_TAG }}
-          fi
-          echo "BASE_IMG_TAG=${TAG}" >> $GITHUB_ENV
-
       - name: Login to Docker Hub
         uses: docker/login-action@v3
         with:

@@ -81,10 +86,53 @@ jobs:
         uses: docker/build-push-action@v5.1.0
         with:
           context: ./aio
-          file: ./aio/Dockerfile.base
+          file: ./aio/Dockerfile-base-full
           platforms: ${{ env.BUILDX_PLATFORMS }}
           tags: ${{ env.BASE_IMG_TAG }}
           push: true
           cache-from: type=gha
           cache-to: type=gha,mode=max
         env:
           DOCKER_BUILDKIT: 1
           DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
           DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
+  slim_base_build_push:
+    runs-on: ubuntu-latest
+    needs: [base_build_setup]
+    env:
+      BASE_IMG_TAG: makeplane/plane-aio-base:slim-${{ needs.base_build_setup.outputs.image_tag }}
+      BUILDX_DRIVER: ${{ needs.base_build_setup.outputs.gh_buildx_driver }}
+      BUILDX_VERSION: ${{ needs.base_build_setup.outputs.gh_buildx_version }}
+      BUILDX_PLATFORMS: ${{ needs.base_build_setup.outputs.gh_buildx_platforms }}
+      BUILDX_ENDPOINT: ${{ needs.base_build_setup.outputs.gh_buildx_endpoint }}
+    steps:
+      - name: Check out the repo
+        uses: actions/checkout@v4
+
+      - name: Login to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+        with:
+          driver: ${{ env.BUILDX_DRIVER }}
+          version: ${{ env.BUILDX_VERSION }}
+          endpoint: ${{ env.BUILDX_ENDPOINT }}
+
+      - name: Build and Push to Docker Hub
+        uses: docker/build-push-action@v5.1.0
+        with:
+          context: ./aio
+          file: ./aio/Dockerfile-base-slim
+          platforms: ${{ env.BUILDX_PLATFORMS }}
+          tags: ${{ env.BASE_IMG_TAG }}
+          push: true
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
+        env:
+          DOCKER_BUILDKIT: 1
+          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
207 changes: .github/workflows/build-aio-branch.yml (vendored, new file)

@@ -0,0 +1,207 @@
name: Branch Build AIO

on:
  workflow_dispatch:
    inputs:
      full:
        description: 'Run full build'
        type: boolean
        required: false
        default: false
      slim:
        description: 'Run slim build'
        type: boolean
        required: false
        default: false
      base_tag_name:
        description: 'Base Tag Name'
        required: false
        default: ''
  release:
    types: [released, prereleased]

env:
  TARGET_BRANCH: ${{ github.ref_name || github.event.release.target_commitish }}
  FULL_BUILD_INPUT: ${{ github.event.inputs.full }}
  SLIM_BUILD_INPUT: ${{ github.event.inputs.slim }}

jobs:
  branch_build_setup:
    name: Build Setup
    runs-on: ubuntu-latest
    outputs:
      gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
      flat_branch_name: ${{ steps.set_env_variables.outputs.FLAT_BRANCH_NAME }}
      gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
      gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
      gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
      gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
      aio_base_tag: ${{ steps.set_env_variables.outputs.AIO_BASE_TAG }}
      do_full_build: ${{ steps.set_env_variables.outputs.DO_FULL_BUILD }}
      do_slim_build: ${{ steps.set_env_variables.outputs.DO_SLIM_BUILD }}

    steps:
      - id: set_env_variables
        name: Set Environment Variables
        run: |
          if [ "${{ env.TARGET_BRANCH }}" == "master" ] || [ "${{ github.event_name }}" == "release" ]; then
            echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
            echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
            echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
            echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT

            echo "AIO_BASE_TAG=latest" >> $GITHUB_OUTPUT
          else
            echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
            echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
            echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
            echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT

            if [ "${{ github.event_name}}" == "workflow_dispatch" ] && [ "${{ github.event.inputs.base_tag_name }}" != "" ]; then
              echo "AIO_BASE_TAG=${{ github.event.inputs.base_tag_name }}" >> $GITHUB_OUTPUT
            elif [ "${{ env.TARGET_BRANCH }}" == "preview" ]; then
              echo "AIO_BASE_TAG=preview" >> $GITHUB_OUTPUT
            else
              echo "AIO_BASE_TAG=develop" >> $GITHUB_OUTPUT
            fi
          fi
          echo "TARGET_BRANCH=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT

          if [ "${{ env.FULL_BUILD_INPUT }}" == "true" ] || [ "${{github.event_name}}" == "push" ] || [ "${{github.event_name}}" == "release" ]; then
            echo "DO_FULL_BUILD=true" >> $GITHUB_OUTPUT
          else
            echo "DO_FULL_BUILD=false" >> $GITHUB_OUTPUT
          fi

          if [ "${{ env.SLIM_BUILD_INPUT }}" == "true" ] || [ "${{github.event_name}}" == "push" ] || [ "${{github.event_name}}" == "release" ]; then
            echo "DO_SLIM_BUILD=true" >> $GITHUB_OUTPUT
          else
            echo "DO_SLIM_BUILD=false" >> $GITHUB_OUTPUT
          fi

          FLAT_BRANCH_NAME=$(echo "${{ env.TARGET_BRANCH }}" | sed 's/[^a-zA-Z0-9]/-/g')
          echo "FLAT_BRANCH_NAME=$FLAT_BRANCH_NAME" >> $GITHUB_OUTPUT

      - id: checkout_files
        name: Checkout Files
        uses: actions/checkout@v4

  full_build_push:
    if: ${{ needs.branch_build_setup.outputs.do_full_build == 'true' }}
    runs-on: ubuntu-20.04
    needs: [branch_build_setup]
    env:
      BUILD_TYPE: full
      AIO_BASE_TAG: ${{ needs.branch_build_setup.outputs.aio_base_tag }}
      AIO_IMAGE_TAGS: makeplane/plane-aio:full-${{ needs.branch_build_setup.outputs.flat_branch_name }}
      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
    steps:
      - name: Set Docker Tag
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            TAG=makeplane/plane-aio:${{env.BUILD_TYPE}}-stable,makeplane/plane-aio:${{env.BUILD_TYPE}}-${{ github.event.release.tag_name }}
          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            TAG=makeplane/plane-aio:${{env.BUILD_TYPE}}-latest
          else
            TAG=${{ env.AIO_IMAGE_TAGS }}
          fi
          echo "AIO_IMAGE_TAGS=${TAG}" >> $GITHUB_ENV

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: ${{ env.BUILDX_DRIVER }}
          version: ${{ env.BUILDX_VERSION }}
          endpoint: ${{ env.BUILDX_ENDPOINT }}

      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Build and Push to Docker Hub
        uses: docker/build-push-action@v5.1.0
        with:
          context: .
          file: ./aio/Dockerfile-app
          platforms: ${{ env.BUILDX_PLATFORMS }}
          tags: ${{ env.AIO_IMAGE_TAGS }}
          push: true
          build-args: |
            BASE_TAG=${{ env.AIO_BASE_TAG }}
            BUILD_TYPE=${{env.BUILD_TYPE}}
          cache-from: type=gha
          cache-to: type=gha,mode=max
        env:
          DOCKER_BUILDKIT: 1
          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

  slim_build_push:
    if: ${{ needs.branch_build_setup.outputs.do_slim_build == 'true' }}
    runs-on: ubuntu-20.04
    needs: [branch_build_setup]
    env:
      BUILD_TYPE: slim
      AIO_BASE_TAG: ${{ needs.branch_build_setup.outputs.aio_base_tag }}
      AIO_IMAGE_TAGS: makeplane/plane-aio:slim-${{ needs.branch_build_setup.outputs.flat_branch_name }}
      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
    steps:
      - name: Set Docker Tag
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            TAG=makeplane/plane-aio:${{env.BUILD_TYPE}}-stable,makeplane/plane-aio:${{env.BUILD_TYPE}}-${{ github.event.release.tag_name }}
          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            TAG=makeplane/plane-aio:${{env.BUILD_TYPE}}-latest
          else
            TAG=${{ env.AIO_IMAGE_TAGS }}
          fi
          echo "AIO_IMAGE_TAGS=${TAG}" >> $GITHUB_ENV

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: ${{ env.BUILDX_DRIVER }}
          version: ${{ env.BUILDX_VERSION }}
          endpoint: ${{ env.BUILDX_ENDPOINT }}

      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Build and Push to Docker Hub
        uses: docker/build-push-action@v5.1.0
        with:
          context: .
          file: ./aio/Dockerfile-app
          platforms: ${{ env.BUILDX_PLATFORMS }}
          tags: ${{ env.AIO_IMAGE_TAGS }}
          push: true
          build-args: |
            BASE_TAG=${{ env.AIO_BASE_TAG }}
            BUILD_TYPE=${{env.BUILD_TYPE}}
          cache-from: type=gha
          cache-to: type=gha,mode=max
        env:
          DOCKER_BUILDKIT: 1
          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
318 changes: .github/workflows/feature-deployment.yml (vendored)

@@ -3,189 +3,108 @@ name: Feature Preview
 on:
   workflow_dispatch:
     inputs:
-      web-build:
-        required: false
-        description: "Build Web"
-        type: boolean
-        default: true
-      space-build:
-        required: false
-        description: "Build Space"
-        type: boolean
-        default: false
-      admin-build:
-        required: false
-        description: "Build Admin"
-        type: boolean
-        default: false
+      base_tag_name:
+        description: 'Base Tag Name'
+        required: false
+        default: 'preview'

 env:
-  BUILD_WEB: ${{ github.event.inputs.web-build }}
-  BUILD_SPACE: ${{ github.event.inputs.space-build }}
-  BUILD_ADMIN: ${{ github.event.inputs.admin-build }}
   TARGET_BRANCH: ${{ github.ref_name }}

 jobs:
-  setup-feature-build:
-    name: Feature Build Setup
+  branch_build_setup:
+    name: Build Setup
     runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        run: |
-          echo "BUILD_WEB=$BUILD_WEB"
-          echo "BUILD_SPACE=$BUILD_SPACE"
-          echo "BUILD_ADMIN=$BUILD_ADMIN"
     outputs:
-      web-build: ${{ env.BUILD_WEB}}
-      space-build: ${{env.BUILD_SPACE}}
-      admin-build: ${{env.BUILD_ADMIN}}
+      gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
+      flat_branch_name: ${{ steps.set_env_variables.outputs.FLAT_BRANCH_NAME }}
+      gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
+      gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
+      gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
+      gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
+      aio_base_tag: ${{ steps.set_env_variables.outputs.AIO_BASE_TAG }}
+      do_full_build: ${{ steps.set_env_variables.outputs.DO_FULL_BUILD }}
+      do_slim_build: ${{ steps.set_env_variables.outputs.DO_SLIM_BUILD }}
+
+    steps:
+      - id: set_env_variables
+        name: Set Environment Variables
+        run: |
+          echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
+          echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
+          echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
+          echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
+
+          if [ "${{ github.event.inputs.base_tag_name }}" != "" ]; then
+            echo "AIO_BASE_TAG=${{ github.event.inputs.base_tag_name }}" >> $GITHUB_OUTPUT
+          else
+            echo "AIO_BASE_TAG=develop" >> $GITHUB_OUTPUT
+          fi
+
+          echo "TARGET_BRANCH=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT
+
+          FLAT_BRANCH_NAME=$(echo "${{ env.TARGET_BRANCH }}" | sed 's/[^a-zA-Z0-9]/-/g')
+          echo "FLAT_BRANCH_NAME=$FLAT_BRANCH_NAME" >> $GITHUB_OUTPUT
+
+      - id: checkout_files
+        name: Checkout Files
+        uses: actions/checkout@v4

-  feature-build-web:
-    if: ${{ needs.setup-feature-build.outputs.web-build == 'true' }}
-    needs: setup-feature-build
-    name: Feature Build Web
-    runs-on: ubuntu-latest
-    env:
-      AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }}
-      AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }}
-      AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}
-      NEXT_PUBLIC_API_BASE_URL: ${{ vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL }}
-    steps:
-      - name: Set up Node.js
-        uses: actions/setup-node@v4
-        with:
-          node-version: "18"
-      - name: Install AWS cli
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y python3-pip
-          pip3 install awscli
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          path: plane
-      - name: Install Dependencies
-        run: |
-          cd $GITHUB_WORKSPACE/plane
-          yarn install
-      - name: Build Web
-        id: build-web
-        run: |
-          cd $GITHUB_WORKSPACE/plane
-          yarn build --filter=web
-          cd $GITHUB_WORKSPACE
-
-          TAR_NAME="web.tar.gz"
-          tar -czf $TAR_NAME ./plane
-
-          FILE_EXPIRY=$(date -u -d "+2 days" +"%Y-%m-%dT%H:%M:%SZ")
-          aws s3 cp $TAR_NAME s3://${{ env.AWS_BUCKET }}/${{github.sha}}/$TAR_NAME --expires $FILE_EXPIRY

-  feature-build-space:
-    if: ${{ needs.setup-feature-build.outputs.space-build == 'true' }}
-    needs: setup-feature-build
-    name: Feature Build Space
-    runs-on: ubuntu-latest
-    env:
-      AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }}
-      AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }}
-      AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}
-      NEXT_PUBLIC_SPACE_BASE_PATH: "/spaces"
-      NEXT_PUBLIC_API_BASE_URL: ${{ vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL }}
-    outputs:
-      do-build: ${{ needs.setup-feature-build.outputs.space-build }}
-      s3-url: ${{ steps.build-space.outputs.S3_PRESIGNED_URL }}
-    steps:
-      - name: Set up Node.js
-        uses: actions/setup-node@v4
-        with:
-          node-version: "18"
-      - name: Install AWS cli
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y python3-pip
-          pip3 install awscli
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          path: plane
-      - name: Install Dependencies
-        run: |
-          cd $GITHUB_WORKSPACE/plane
-          yarn install
-      - name: Build Space
-        id: build-space
-        run: |
-          cd $GITHUB_WORKSPACE/plane
-          yarn build --filter=space
-          cd $GITHUB_WORKSPACE
-
-          TAR_NAME="space.tar.gz"
-          tar -czf $TAR_NAME ./plane
-
-          FILE_EXPIRY=$(date -u -d "+2 days" +"%Y-%m-%dT%H:%M:%SZ")
-          aws s3 cp $TAR_NAME s3://${{ env.AWS_BUCKET }}/${{github.sha}}/$TAR_NAME --expires $FILE_EXPIRY

-  feature-build-admin:
-    if: ${{ needs.setup-feature-build.outputs.admin-build == 'true' }}
-    needs: setup-feature-build
-    name: Feature Build Admin
-    runs-on: ubuntu-latest
-    env:
-      AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }}
-      AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }}
-      AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}
-      NEXT_PUBLIC_ADMIN_BASE_PATH: "/god-mode"
-      NEXT_PUBLIC_API_BASE_URL: ${{ vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL }}
-    outputs:
-      do-build: ${{ needs.setup-feature-build.outputs.admin-build }}
-      s3-url: ${{ steps.build-admin.outputs.S3_PRESIGNED_URL }}
-    steps:
-      - name: Set up Node.js
-        uses: actions/setup-node@v4
-        with:
-          node-version: "18"
-      - name: Install AWS cli
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y python3-pip
-          pip3 install awscli
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          path: plane
-      - name: Install Dependencies
-        run: |
-          cd $GITHUB_WORKSPACE/plane
-          yarn install
-      - name: Build Admin
-        id: build-admin
-        run: |
-          cd $GITHUB_WORKSPACE/plane
-          yarn build --filter=admin
-          cd $GITHUB_WORKSPACE
-
-          TAR_NAME="admin.tar.gz"
-          tar -czf $TAR_NAME ./plane
-
-          FILE_EXPIRY=$(date -u -d "+2 days" +"%Y-%m-%dT%H:%M:%SZ")
-          aws s3 cp $TAR_NAME s3://${{ env.AWS_BUCKET }}/${{github.sha}}/$TAR_NAME --expires $FILE_EXPIRY
+  full_build_push:
+    runs-on: ubuntu-20.04
+    needs: [branch_build_setup]
+    env:
+      BUILD_TYPE: full
+      AIO_BASE_TAG: ${{ needs.branch_build_setup.outputs.aio_base_tag }}
+      AIO_IMAGE_TAGS: makeplane/plane-aio-feature:${{ needs.branch_build_setup.outputs.flat_branch_name }}
+      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
+    steps:
+      - name: Login to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+        with:
+          driver: ${{ env.BUILDX_DRIVER }}
+          version: ${{ env.BUILDX_VERSION }}
+          endpoint: ${{ env.BUILDX_ENDPOINT }}
+
+      - name: Check out the repo
+        uses: actions/checkout@v4
+
+      - name: Build and Push to Docker Hub
+        uses: docker/build-push-action@v5.1.0
+        with:
+          context: .
+          file: ./aio/Dockerfile-app
+          platforms: ${{ env.BUILDX_PLATFORMS }}
+          tags: ${{ env.AIO_IMAGE_TAGS }}
+          push: true
+          build-args:
+            BUILD_TAG=${{ env.AIO_BASE_TAG }}
+            BUILD_TYPE=${{env.BUILD_TYPE}}
+          # cache-from: type=gha
+          # cache-to: type=gha,mode=max
+        env:
+          DOCKER_BUILDKIT: 1
+          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+    outputs:
+      AIO_IMAGE_TAGS: ${{ env.AIO_IMAGE_TAGS }}

   feature-deploy:
-    if: ${{ always() && (needs.setup-feature-build.outputs.web-build == 'true' || needs.setup-feature-build.outputs.space-build == 'true' || needs.setup-feature-build.outputs.admin-build == 'true') }}
-    needs:
-      [
-        setup-feature-build,
-        feature-build-web,
-        feature-build-space,
-        feature-build-admin,
-      ]
+    needs: [branch_build_setup, full_build_push]
     name: Feature Deploy
     runs-on: ubuntu-latest
     env:
       AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }}
       AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }}
       AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}
       KUBE_CONFIG_FILE: ${{ secrets.FEATURE_PREVIEW_KUBE_CONFIG }}
+      DEPLOYMENT_NAME: ${{ needs.branch_build_setup.outputs.flat_branch_name }}
     steps:
       - name: Install AWS cli
         run: |

@@ -213,54 +132,37 @@
           ./get_helm.sh
       - name: App Deploy
         run: |
-          WEB_S3_URL=""
-          if [ ${{ env.BUILD_WEB }} == true ]; then
-            WEB_S3_URL=$(aws s3 presign s3://${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}/${{github.sha}}/web.tar.gz --expires-in 3600)
-          fi
-
-          SPACE_S3_URL=""
-          if [ ${{ env.BUILD_SPACE }} == true ]; then
-            SPACE_S3_URL=$(aws s3 presign s3://${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}/${{github.sha}}/space.tar.gz --expires-in 3600)
-          fi
-
-          ADMIN_S3_URL=""
-          if [ ${{ env.BUILD_ADMIN }} == true ]; then
-            ADMIN_S3_URL=$(aws s3 presign s3://${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}/${{github.sha}}/admin.tar.gz --expires-in 3600)
-          fi
-
-          helm --kube-insecure-skip-tls-verify repo add feature-preview ${{ vars.FEATURE_PREVIEW_HELM_CHART_URL }}
-
-          APP_NAMESPACE="${{ vars.FEATURE_PREVIEW_NAMESPACE }}"
-          DEPLOY_SCRIPT_URL="${{ vars.FEATURE_PREVIEW_DEPLOY_SCRIPT_URL }}"
-
-          if [ ${{ env.BUILD_WEB }} == true ] || [ ${{ env.BUILD_SPACE }} == true ] || [ ${{ env.BUILD_ADMIN }} == true ]; then
-            METADATA=$(helm --kube-insecure-skip-tls-verify install feature-preview/${{ vars.FEATURE_PREVIEW_HELM_CHART_NAME }} \
-              --generate-name \
-              --namespace $APP_NAMESPACE \
-              --set ingress.primaryDomain=${{vars.FEATURE_PREVIEW_PRIMARY_DOMAIN || 'feature.plane.tools' }} \
-              --set web.image=${{vars.FEATURE_PREVIEW_DOCKER_BASE}} \
-              --set web.enabled=${{ env.BUILD_WEB || false }} \
-              --set web.artifact_url=$WEB_S3_URL \
-              --set space.image=${{vars.FEATURE_PREVIEW_DOCKER_BASE}} \
-              --set space.enabled=${{ env.BUILD_SPACE || false }} \
-              --set space.artifact_url=$SPACE_S3_URL \
-              --set admin.image=${{vars.FEATURE_PREVIEW_DOCKER_BASE}} \
-              --set admin.enabled=${{ env.BUILD_ADMIN || false }} \
-              --set admin.artifact_url=$ADMIN_S3_URL \
-              --set shared_config.deploy_script_url=$DEPLOY_SCRIPT_URL \
-              --set shared_config.api_base_url=${{vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL}} \
-              --output json \
-              --timeout 1000s)
-
-            APP_NAME=$(echo $METADATA | jq -r '.name')
-
-            INGRESS_HOSTNAME=$(kubectl get ingress -n feature-builds --insecure-skip-tls-verify \
-              -o jsonpath='{.items[?(@.metadata.annotations.meta\.helm\.sh\/release-name=="'$APP_NAME'")]}' | \
-              jq -r '.spec.rules[0].host')
-
-            echo "****************************************"
-            echo "APP NAME ::: $APP_NAME"
-            echo "INGRESS HOSTNAME ::: $INGRESS_HOSTNAME"
-            echo "****************************************"
-          fi
+          helm --kube-insecure-skip-tls-verify repo add feature-preview ${{ vars.FEATURE_PREVIEW_HELM_CHART_URL }}
+
+          APP_NAMESPACE="${{ vars.FEATURE_PREVIEW_NAMESPACE }}"
+
+          helm --kube-insecure-skip-tls-verify uninstall \
+            ${{ env.DEPLOYMENT_NAME }} \
+            --namespace $APP_NAMESPACE \
+            --timeout 10m0s \
+            --wait \
+            --ignore-not-found
+
+          METADATA=$(helm --kube-insecure-skip-tls-verify upgrade \
+            --install=true \
+            --namespace $APP_NAMESPACE \
+            --set dockerhub.loginid=${{ secrets.DOCKERHUB_USERNAME }} \
+            --set dockerhub.password=${{ secrets.DOCKERHUB_TOKEN_RO}} \
+            --set config.feature_branch=${{ env.DEPLOYMENT_NAME }} \
+            --set ingress.primaryDomain=${{vars.FEATURE_PREVIEW_PRIMARY_DOMAIN || 'feature.plane.tools' }} \
+            --set ingress.tls_secret=${{vars.FEATURE_PREVIEW_INGRESS_TLS_SECRET || '' }} \
+            --output json \
+            --timeout 10m0s \
+            --wait \
+            ${{ env.DEPLOYMENT_NAME }} feature-preview/${{ vars.FEATURE_PREVIEW_HELM_CHART_NAME }} )
+
+          APP_NAME=$(echo $METADATA | jq -r '.name')
+
+          INGRESS_HOSTNAME=$(kubectl get ingress -n $APP_NAMESPACE --insecure-skip-tls-verify \
+            -o jsonpath='{.items[?(@.metadata.annotations.meta\.helm\.sh\/release-name=="'$APP_NAME'")]}' | \
+            jq -r '.spec.rules[0].host')
+
+          echo "****************************************"
+          echo "APP NAME ::: $APP_NAME"
+          echo "INGRESS HOSTNAME ::: $INGRESS_HOSTNAME"
+          echo "****************************************"
85 changes: ENV_SETUP.md

@@ -1,6 +1,5 @@
 # Environment Variables
-
 
 Environment variables are distributed in various files. Please refer them carefully.
 
 ## {PROJECT_FOLDER}/.env

@@ -9,17 +8,13 @@ File is available in the project root folder
 ```
 # Database Settings
-PGUSER="plane"
-PGPASSWORD="plane"
-PGHOST="plane-db"
-PGDATABASE="plane"
-DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
+POSTGRES_USER="plane"
+POSTGRES_PASSWORD="plane"
+POSTGRES_DB="plane"
+PGDATA="/var/lib/postgresql/data"
 
 # Redis Settings
 REDIS_HOST="plane-redis"
 REDIS_PORT="6379"
 REDIS_URL="redis://${REDIS_HOST}:6379/"
 
 # AWS Settings
 AWS_REGION=""
 AWS_ACCESS_KEY_ID="access-key"

@@ -29,63 +24,39 @@ AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
 AWS_S3_BUCKET_NAME="uploads"
 # Maximum file upload limit
 FILE_SIZE_LIMIT=5242880
-
-# GPT settings
-OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
-OPENAI_API_KEY="sk-" # deprecated
-GPT_ENGINE="gpt-3.5-turbo" # deprecated
-
-# Settings related to Docker
-DOCKERIZED=1 # deprecated
-# set to 1 If using the pre-configured minio setup
-USE_MINIO=1
-
-# Nginx Configuration
-NGINX_PORT=80
 ```
 
-
-## {PROJECT_FOLDER}/web/.env.example
-
-```
-# Public boards deploy URL
-NEXT_PUBLIC_DEPLOY_URL="http://localhost/spaces"
-```
-
 ## {PROJECT_FOLDER}/apiserver/.env
 
 ```
 # Backend
 # Debug value for api server use it as 0 for production use
 DEBUG=0
+CORS_ALLOWED_ORIGINS="http://localhost"
 
 # Error logs
 SENTRY_DSN=""
+SENTRY_ENVIRONMENT="development"
 
 # Database Settings
-PGUSER="plane"
-PGPASSWORD="plane"
-PGHOST="plane-db"
-PGDATABASE="plane"
-DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
+POSTGRES_USER="plane"
+POSTGRES_PASSWORD="plane"
+POSTGRES_HOST="plane-db"
+POSTGRES_DB="plane"
+POSTGRES_PORT=5432
+DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
 
 # Redis Settings
 REDIS_HOST="plane-redis"
 REDIS_PORT="6379"
 REDIS_URL="redis://${REDIS_HOST}:6379/"
 
-# Email Settings
-EMAIL_HOST=""
-EMAIL_HOST_USER=""
-EMAIL_HOST_PASSWORD=""
-EMAIL_PORT=587
-EMAIL_FROM="Team Plane <team@mailer.plane.so>"
-EMAIL_USE_TLS="1"
-EMAIL_USE_SSL="0"
-
 # AWS Settings
 AWS_REGION=""
 AWS_ACCESS_KEY_ID="access-key"

@@ -95,35 +66,25 @@ AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
 AWS_S3_BUCKET_NAME="uploads"
 # Maximum file upload limit
 FILE_SIZE_LIMIT=5242880
 
 # GPT settings
 OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
 OPENAI_API_KEY="sk-" # deprecated
 GPT_ENGINE="gpt-3.5-turbo" # deprecated
 
 # Settings related to Docker
-DOCKERIZED=1 # Deprecated
-
-# Github
-GITHUB_CLIENT_SECRET="" # For fetching release notes
-
+DOCKERIZED=1 # deprecated
 # set to 1 If using the pre-configured minio setup
 USE_MINIO=1
 
 # Nginx Configuration
 NGINX_PORT=80
 
 # SignUps
 ENABLE_SIGNUP="1"
 
-# Email Redirection URL
+# Email redirections and minio domain settings
 WEB_URL="http://localhost"
+
+# Gunicorn Workers
+GUNICORN_WORKERS=2
+
+# Base URLs
+ADMIN_BASE_URL=
+SPACE_BASE_URL=
+APP_BASE_URL=
+
+SECRET_KEY="gxoytl7dmnc1y37zahah820z5iq3iozu38cnfjtu3yaau9cd9z"
 ```
 
 ## Updates
 
 - The environment variable NEXT_PUBLIC_API_BASE_URL has been removed from both the web and space projects.
 - The naming convention for containers and images has been updated.
 - The plane-worker image will no longer be maintained, as it has been merged with plane-backend.
 - The Tiptap pro-extension dependency has been removed, eliminating the need for Tiptap API keys.
@@ -3,7 +3,7 @@ import { Metadata } from "next";
|
||||
import { AdminLayout } from "@/layouts/admin-layout";
|
||||
|
||||
export const metadata: Metadata = {
|
||||
title: "AI Settings - God Mode",
|
||||
title: "Artificial Intelligence Settings - Plane Web",
|
||||
};
|
||||
|
||||
export default function AILayout({ children }: { children: ReactNode }) {
|
||||
|
||||
@@ -3,8 +3,6 @@
|
||||
import { observer } from "mobx-react";
|
||||
import useSWR from "swr";
|
||||
import { Loader } from "@plane/ui";
|
||||
// components
|
||||
import { PageHeader } from "@/components/common";
|
||||
// hooks
|
||||
import { useInstance } from "@/hooks/store";
|
||||
// components
|
||||
@@ -18,7 +16,6 @@ const InstanceAIPage = observer(() => {
|
||||
|
||||
return (
|
||||
<>
|
||||
<PageHeader title="Artificial Intelligence - God Mode" />
|
||||
<div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
|
||||
<div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
|
||||
<div className="text-xl font-medium text-custom-text-100">AI features for all your workspaces</div>
|
||||
|
||||
@@ -10,6 +10,7 @@ import { IFormattedInstanceConfiguration, TInstanceGithubAuthenticationConfigura
 import { Button, TOAST_TYPE, getButtonStyling, setToast } from "@plane/ui";
 // components
 import {
+  CodeBlock,
   ConfirmDiscardModal,
   ControllerInput,
   CopyField,

@@ -102,7 +103,8 @@ export const InstanceGithubConfigForm: FC<Props> = (props) => {
       url: originURL,
       description: (
         <>
-          We will auto-generate this. Paste this into the Authorized origin URL field{" "}
+          We will auto-generate this. Paste this into the{" "}
+          <CodeBlock darkerShade>Authorized origin URL</CodeBlock> field{" "}
           <a
             tabIndex={-1}
             href="https://github.com/settings/applications/new"

@@ -121,7 +123,8 @@ export const InstanceGithubConfigForm: FC<Props> = (props) => {
       url: `${originURL}/auth/github/callback/`,
       description: (
         <>
-          We will auto-generate this. Paste this into your Authorized Callback URI field{" "}
+          We will auto-generate this. Paste this into your{" "}
+          <CodeBlock darkerShade>Authorized Callback URI</CodeBlock> field{" "}
           <a
             tabIndex={-1}
             href="https://github.com/settings/applications/new"

@@ -143,8 +146,8 @@ export const InstanceGithubConfigForm: FC<Props> = (props) => {
       .then((response = []) => {
         setToast({
           type: TOAST_TYPE.SUCCESS,
-          title: "Success",
-          message: "Github Configuration Settings updated successfully",
+          title: "Done!",
+          message: "Your GitHub authentication is configured. You should test it now.",
         });
         reset({
           GITHUB_CLIENT_ID: response.find((item) => item.key === "GITHUB_CLIENT_ID")?.value,

@@ -170,8 +173,8 @@ export const InstanceGithubConfigForm: FC<Props> = (props) => {
       />
       <div className="flex flex-col gap-8">
         <div className="grid grid-cols-2 gap-x-12 gap-y-8 w-full">
-          <div className="flex flex-col gap-y-4 col-span-2 md:col-span-1">
-            <div className="pt-2 text-xl font-medium">Configuration</div>
+          <div className="flex flex-col gap-y-4 col-span-2 md:col-span-1 pt-1">
+            <div className="pt-2.5 text-xl font-medium">GitHub-provided details for Plane</div>
             {GITHUB_FORM_FIELDS.map((field) => (
               <ControllerInput
                 key={field.key}

@@ -201,8 +204,8 @@ export const InstanceGithubConfigForm: FC<Props> = (props) => {
           </div>
         </div>
         <div className="col-span-2 md:col-span-1">
-          <div className="flex flex-col gap-y-4 px-6 py-4 my-2 bg-custom-background-80/60 rounded-lg">
-            <div className="pt-2 text-xl font-medium">Service provider details</div>
+          <div className="flex flex-col gap-y-4 px-6 pt-1.5 pb-4 bg-custom-background-80/60 rounded-lg">
+            <div className="pt-2 text-xl font-medium">Plane-provided details for GitHub</div>
             {GITHUB_SERVICE_FIELD.map((field) => (
               <CopyField key={field.key} label={field.label} url={field.url} description={field.description} />
             ))}

@@ -63,7 +63,7 @@ const InstanceGithubAuthenticationPage = observer(() => {
   };
   return (
     <>
-      <PageHeader title="Authentication - God Mode" />
+      <PageHeader title="GitHub Authentication - Plane Web" />
       <div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
         <div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
           <AuthenticationMethodCard

@@ -93,7 +93,7 @@ const InstanceGithubAuthenticationPage = observer(() => {
             withBorder={false}
           />
         </div>
-        <div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md p-4">
+        <div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
           {formattedConfig ? (
             <InstanceGithubConfigForm config={formattedConfig} />
           ) : (
@@ -8,6 +8,7 @@ import { IFormattedInstanceConfiguration, TInstanceGitlabAuthenticationConfigura
 import { Button, TOAST_TYPE, getButtonStyling, setToast } from "@plane/ui";
 // components
 import {
+  CodeBlock,
   ConfirmDiscardModal,
   ControllerInput,
   CopyField,

@@ -54,7 +55,7 @@ export const InstanceGitlabConfigForm: FC<Props> = (props) => {
       label: "Host",
       description: (
         <>
-          This is the <b>GitLab host</b> to use for login, <b>including scheme</b>.
+          This is either https://gitlab.com or the <CodeBlock>domain.tld</CodeBlock> where you host GitLab.
         </>
       ),
       placeholder: "https://gitlab.com",

@@ -116,7 +117,8 @@ export const InstanceGitlabConfigForm: FC<Props> = (props) => {
       url: `${originURL}/auth/gitlab/callback/`,
       description: (
         <>
-          We will auto-generate this. Paste this into the <b>Redirect URI</b> field of your{" "}
+          We will auto-generate this. Paste this into the{" "}
+          <CodeBlock darkerShade>Redirect URI</CodeBlock> field of your{" "}
           <a
             tabIndex={-1}
             href="https://docs.gitlab.com/ee/integration/oauth_provider.html"

@@ -139,8 +141,8 @@ export const InstanceGitlabConfigForm: FC<Props> = (props) => {
       .then((response = []) => {
         setToast({
           type: TOAST_TYPE.SUCCESS,
-          title: "Success",
-          message: "GitLab Configuration Settings updated successfully",
+          title: "Done!",
+          message: "Your GitLab authentication is configured. You should test it now.",
         });
         reset({
           GITLAB_HOST: response.find((item) => item.key === "GITLAB_HOST")?.value,

@@ -167,8 +169,8 @@ export const InstanceGitlabConfigForm: FC<Props> = (props) => {
       />
       <div className="flex flex-col gap-8">
         <div className="grid grid-cols-2 gap-x-12 gap-y-8 w-full">
-          <div className="flex flex-col gap-y-4 col-span-2 md:col-span-1">
-            <div className="pt-2 text-xl font-medium">Configuration</div>
+          <div className="flex flex-col gap-y-4 col-span-2 md:col-span-1 pt-1">
+            <div className="pt-2.5 text-xl font-medium">GitLab-provided details for Plane</div>
             {GITLAB_FORM_FIELDS.map((field) => (
               <ControllerInput
                 key={field.key}

@@ -198,8 +200,8 @@ export const InstanceGitlabConfigForm: FC<Props> = (props) => {
           </div>
         </div>
         <div className="col-span-2 md:col-span-1">
-          <div className="flex flex-col gap-y-4 px-6 py-4 my-2 bg-custom-background-80/60 rounded-lg">
-            <div className="pt-2 text-xl font-medium">Service provider details</div>
+          <div className="flex flex-col gap-y-4 px-6 pt-1.5 pb-4 bg-custom-background-80/60 rounded-lg">
+            <div className="pt-2 text-xl font-medium">Plane-provided details for GitLab</div>
             {GITLAB_SERVICE_FIELD.map((field) => (
               <CopyField key={field.key} label={field.label} url={field.url} description={field.description} />
             ))}

@@ -57,7 +57,7 @@ const InstanceGitlabAuthenticationPage = observer(() => {
   };
   return (
     <>
-      <PageHeader title="Authentication - God Mode" />
+      <PageHeader title="GitLab Authentication - Plane Web" />
       <div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
         <div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
           <AuthenticationMethodCard

@@ -80,7 +80,7 @@ const InstanceGitlabAuthenticationPage = observer(() => {
             withBorder={false}
           />
         </div>
-        <div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md p-4">
+        <div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
           {formattedConfig ? (
             <InstanceGitlabConfigForm config={formattedConfig} />
           ) : (
@@ -9,6 +9,7 @@ import { IFormattedInstanceConfiguration, TInstanceGoogleAuthenticationConfigura
 import { Button, TOAST_TYPE, getButtonStyling, setToast } from "@plane/ui";
 // components
 import {
+  CodeBlock,
   ConfirmDiscardModal,
   ControllerInput,
   CopyField,

@@ -101,7 +102,8 @@ export const InstanceGoogleConfigForm: FC<Props> = (props) => {
     url: originURL,
     description: (
       <p>
-        We will auto-generate this. Paste this into your Authorized JavaScript origins field. For this OAuth client{" "}
+        We will auto-generate this. Paste this into your{" "}
+        <CodeBlock darkerShade>Authorized JavaScript origins</CodeBlock> field. For this OAuth client{" "}
         <a
           href="https://console.cloud.google.com/apis/credentials/oauthclient"
           target="_blank"

@@ -119,7 +121,8 @@ export const InstanceGoogleConfigForm: FC<Props> = (props) => {
     url: `${originURL}/auth/google/callback/`,
     description: (
       <p>
-        We will auto-generate this. Paste this into your Authorized Redirect URI field. For this OAuth client{" "}
+        We will auto-generate this. Paste this into your <CodeBlock darkerShade>Authorized Redirect URI</CodeBlock>{" "}
+        field. For this OAuth client{" "}
        <a
          href="https://console.cloud.google.com/apis/credentials/oauthclient"
          target="_blank"

@@ -140,8 +143,8 @@ export const InstanceGoogleConfigForm: FC<Props> = (props) => {
       .then((response = []) => {
         setToast({
           type: TOAST_TYPE.SUCCESS,
-          title: "Success",
-          message: "Google Configuration Settings updated successfully",
+          title: "Done!",
+          message: "Your Google authentication is configured. You should test it now.",
         });
         reset({
           GOOGLE_CLIENT_ID: response.find((item) => item.key === "GOOGLE_CLIENT_ID")?.value,

@@ -167,8 +170,8 @@ export const InstanceGoogleConfigForm: FC<Props> = (props) => {
       />
       <div className="flex flex-col gap-8">
         <div className="grid grid-cols-2 gap-x-12 gap-y-8 w-full">
-          <div className="flex flex-col gap-y-4 col-span-2 md:col-span-1">
-            <div className="pt-2 text-xl font-medium">Configuration</div>
+          <div className="flex flex-col gap-y-4 col-span-2 md:col-span-1 pt-1">
+            <div className="pt-2.5 text-xl font-medium">Google-provided details for Plane</div>
             {GOOGLE_FORM_FIELDS.map((field) => (
               <ControllerInput
                 key={field.key}

@@ -198,8 +201,8 @@ export const InstanceGoogleConfigForm: FC<Props> = (props) => {
           </div>
         </div>
         <div className="col-span-2 md:col-span-1">
-          <div className="flex flex-col gap-y-4 px-6 py-4 my-2 bg-custom-background-80/60 rounded-lg">
-            <div className="pt-2 text-xl font-medium">Service provider details</div>
+          <div className="flex flex-col gap-y-4 px-6 pt-1.5 pb-4 bg-custom-background-80/60 rounded-lg">
+            <div className="pt-2 text-xl font-medium">Plane-provided details for Google</div>
             {GOOGLE_SERVICE_DETAILS.map((field) => (
               <CopyField key={field.key} label={field.label} url={field.url} description={field.description} />
             ))}

@@ -57,7 +57,7 @@ const InstanceGoogleAuthenticationPage = observer(() => {
   };
   return (
     <>
-      <PageHeader title="Authentication - God Mode" />
+      <PageHeader title="Google Authentication - Plane Web" />
       <div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
         <div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
           <AuthenticationMethodCard

@@ -81,7 +81,7 @@ const InstanceGoogleAuthenticationPage = observer(() => {
             withBorder={false}
           />
         </div>
-        <div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md p-4">
+        <div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
           {formattedConfig ? (
             <InstanceGoogleConfigForm config={formattedConfig} />
           ) : (
@@ -3,7 +3,7 @@ import { Metadata } from "next";
|
||||
import { AdminLayout } from "@/layouts/admin-layout";
|
||||
|
||||
export const metadata: Metadata = {
|
||||
title: "Authentication Settings - God Mode",
|
||||
title: "Authentication Settings - Plane Web",
|
||||
};
|
||||
|
||||
export default function AuthenticationLayout({ children }: { children: ReactNode }) {
|
||||
|
||||
@@ -5,7 +5,6 @@ import { observer } from "mobx-react";
|
||||
import useSWR from "swr";
|
||||
import { TInstanceConfigurationKeys } from "@plane/types";
|
||||
import { Loader, ToggleSwitch, setPromiseToast } from "@plane/ui";
|
||||
import { PageHeader } from "@/components/common";
|
||||
// helpers
|
||||
import { cn } from "@/helpers/common.helper";
|
||||
// hooks
|
||||
@@ -34,7 +33,7 @@ const InstanceAuthenticationPage = observer(() => {
|
||||
const updateConfigPromise = updateInstanceConfigurations(payload);
|
||||
|
||||
setPromiseToast(updateConfigPromise, {
|
||||
loading: "Saving Configuration...",
|
||||
loading: "Saving configuration",
|
||||
success: {
|
||||
title: "Success",
|
||||
message: () => "Configuration saved successfully",
|
||||
@@ -57,10 +56,9 @@ const InstanceAuthenticationPage = observer(() => {
|
||||
|
||||
return (
|
||||
<>
|
||||
<PageHeader title="Authentication - God Mode" />
|
||||
<div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
|
||||
<div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
|
||||
<div className="text-xl font-medium text-custom-text-100">Manage authentication for your instance</div>
|
||||
<div className="text-xl font-medium text-custom-text-100">Manage authentication modes for your instance</div>
|
||||
<div className="text-sm font-normal text-custom-text-300">
|
||||
Configure authentication modes for your team and restrict sign ups to be invite only.
|
||||
</div>
|
||||
@@ -68,15 +66,12 @@ const InstanceAuthenticationPage = observer(() => {
|
||||
<div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
|
||||
{formattedConfig ? (
|
||||
<div className="space-y-3">
|
||||
<div className="text-lg font-medium pb-1">Sign-up configuration</div>
|
||||
<div className={cn("w-full flex items-center gap-14 rounded")}>
|
||||
<div className="flex grow items-center gap-4">
|
||||
<div className="grow">
|
||||
<div className={cn("font-medium leading-5 text-custom-text-100 text-sm")}>
|
||||
Allow anyone to sign up without invite
|
||||
</div>
|
||||
<div className="text-lg font-medium pb-1">Allow anyone to sign up even without an invite</div>
|
||||
<div className={cn("font-normal leading-5 text-custom-text-300 text-xs")}>
|
||||
Toggling this off will disable self sign ups.
|
||||
Toggling this off will only let users sign up when they are invited.
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -7,7 +7,7 @@ interface EmailLayoutProps {
|
||||
}
|
||||
|
||||
export const metadata: Metadata = {
|
||||
title: "Email Settings - God Mode",
|
||||
title: "Email Settings - Plane Web",
|
||||
};
|
||||
|
||||
export default function EmailLayout({ children }: EmailLayoutProps) {
|
||||
|
||||
@@ -3,8 +3,6 @@
 import { observer } from "mobx-react";
 import useSWR from "swr";
 import { Loader } from "@plane/ui";
-// components
-import { PageHeader } from "@/components/common";
 // hooks
 import { useInstance } from "@/hooks/store";
 // components
@@ -18,7 +16,6 @@ const InstanceEmailPage = observer(() => {
   return (
     <>
-      <PageHeader title="Email - God Mode" />
      <div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
        <div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
          <div className="text-xl font-medium text-custom-text-100">Secure emails from your own instance</div>

@@ -3,7 +3,7 @@ import { Metadata } from "next";
 import { AdminLayout } from "@/layouts/admin-layout";

 export const metadata: Metadata = {
-  title: "General Settings - God Mode",
+  title: "General Settings - Plane Web",
 };

 export default function GeneralLayout({ children }: { children: ReactNode }) {

@@ -7,7 +7,7 @@ interface ImageLayoutProps {
 }

 export const metadata: Metadata = {
-  title: "Images Settings - God Mode",
+  title: "Images Settings - Plane Web",
 };

 export default function ImageLayout({ children }: ImageLayoutProps) {

@@ -3,8 +3,6 @@
 import { observer } from "mobx-react";
 import useSWR from "swr";
 import { Loader } from "@plane/ui";
-// components
-import { PageHeader } from "@/components/common";
 // hooks
 import { useInstance } from "@/hooks/store";
 // local
@@ -18,7 +16,6 @@ const InstanceImagePage = observer(() => {
   return (
     <>
-      <PageHeader title="Image - God Mode" />
      <div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
        <div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
          <div className="text-xl font-medium text-custom-text-100">Third-party image libraries</div>

@@ -1,84 +1,48 @@
 import { observer } from "mobx-react";
 import Image from "next/image";
 import { useTheme } from "next-themes";
-import { KeyRound, Mails } from "lucide-react";
 // types
-import { TInstanceAuthenticationMethodKeys, TInstanceAuthenticationModes } from "@plane/types";
-// components
 import {
-  AuthenticationMethodCard,
-  EmailCodesConfiguration,
-  GithubConfiguration,
-  GitlabConfiguration,
-  GoogleConfiguration,
-  PasswordLoginConfiguration,
-} from "@/components/authentication";
+  TGetBaseAuthenticationModeProps,
+  TInstanceAuthenticationMethodKeys,
+  TInstanceAuthenticationModes,
+} from "@plane/types";
+// components
+import { AuthenticationMethodCard } from "@/components/authentication";
+import { UpgradeButton } from "@/components/common/upgrade-button";
 // helpers
-import { resolveGeneralTheme } from "@/helpers/common.helper";
+import { getBaseAuthenticationModes } from "@/helpers/authentication.helper";
 // images
-import githubLightModeImage from "@/public/logos/github-black.png";
-import githubDarkModeImage from "@/public/logos/github-white.png";
-import GitlabLogo from "@/public/logos/gitlab-logo.svg";
-import GoogleLogo from "@/public/logos/google-logo.svg";
+import OIDCLogo from "@/public/logos/oidc-logo.svg";
+import SAMLLogo from "@/public/logos/saml-logo.svg";

 export type TAuthenticationModeProps = {
   disabled: boolean;
   updateConfig: (key: TInstanceAuthenticationMethodKeys, value: string) => void;
 };

-export type TGetAuthenticationModeProps = {
-  disabled: boolean;
-  updateConfig: (key: TInstanceAuthenticationMethodKeys, value: string) => void;
-  resolvedTheme: string | undefined;
-};
-
 // Authentication methods
-export const getAuthenticationModes: (props: TGetAuthenticationModeProps) => TInstanceAuthenticationModes[] = ({
+export const getAuthenticationModes: (props: TGetBaseAuthenticationModeProps) => TInstanceAuthenticationModes[] = ({
   disabled,
   updateConfig,
   resolvedTheme,
 }) => [
+  ...getBaseAuthenticationModes({ disabled, updateConfig, resolvedTheme }),
   {
-    key: "email-codes",
-    name: "Email codes",
-    description: "Login or sign up using codes sent via emails. You need to have email setup here and enabled.",
-    icon: <Mails className="h-6 w-6 p-0.5 text-custom-text-300/80" />,
-    config: <EmailCodesConfiguration disabled={disabled} updateConfig={updateConfig} />,
-  },
-  {
-    key: "password-login",
-    name: "Password based login",
-    description: "Allow members to create accounts with passwords for emails to sign in.",
-    icon: <KeyRound className="h-6 w-6 p-0.5 text-custom-text-300/80" />,
-    config: <PasswordLoginConfiguration disabled={disabled} updateConfig={updateConfig} />,
-  },
-  {
-    key: "google",
-    name: "Google",
-    description: "Allow members to login or sign up to plane with their Google accounts.",
-    icon: <Image src={GoogleLogo} height={20} width={20} alt="Google Logo" />,
-    config: <GoogleConfiguration disabled={disabled} updateConfig={updateConfig} />,
-  },
-  {
-    key: "github",
-    name: "Github",
-    description: "Allow members to login or sign up to plane with their Github accounts.",
-    icon: (
-      <Image
-        src={resolveGeneralTheme(resolvedTheme) === "dark" ? githubDarkModeImage : githubLightModeImage}
-        height={20}
-        width={20}
-        alt="GitHub Logo"
-      />
-    ),
-    config: <GithubConfiguration disabled={disabled} updateConfig={updateConfig} />,
+    key: "oidc",
+    name: "OIDC",
+    description: "Authenticate your users via the OpenID Connect protocol.",
+    icon: <Image src={OIDCLogo} height={22} width={22} alt="OIDC Logo" />,
+    config: <UpgradeButton />,
+    unavailable: true,
   },
   {
-    key: "gitlab",
-    name: "GitLab",
-    description: "Allow members to login or sign up to plane with their GitLab accounts.",
-    icon: <Image src={GitlabLogo} height={20} width={20} alt="GitLab Logo" />,
-    config: <GitlabConfiguration disabled={disabled} updateConfig={updateConfig} />,
+    key: "saml",
+    name: "SAML",
+    description: "Authenticate your users via the Security Assertion Markup Language protocol.",
+    icon: <Image src={SAMLLogo} height={22} width={22} alt="SAML Logo" className="pl-0.5" />,
+    config: <UpgradeButton />,
+    unavailable: true,
   },
 ];
@@ -97,6 +61,7 @@ export const AuthenticationModes: React.FC<TAuthenticationModeProps> = observer(
           icon={method.icon}
           config={method.config}
           disabled={disabled}
+          unavailable={method.unavailable}
         />
       ))}
     </>

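Note: the entry shape these lists must satisfy can be sketched as follows. This is an outline inferred from the usage in this diff, not the authoritative definition, which lives in @plane/types as TInstanceAuthenticationModes:

// Sketch only: field names inferred from the entries above; the real type
// is TInstanceAuthenticationModes in @plane/types.
type TAuthenticationModeSketch = {
  key: string; // stable identifier, e.g. "oidc" or "saml"
  name: string; // display name rendered on the method card
  description: string; // helper text shown under the name
  icon: JSX.Element; // logo image or lucide icon
  config: JSX.Element; // configuration UI, or <UpgradeButton /> when gated
  unavailable?: boolean; // true for modes available only on Plane One
};
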
@@ -41,10 +41,10 @@ export const InstanceSidebar: FC<IInstanceSidebar> = observer(() => {
       <div
         className={`inset-y-0 z-20 flex h-full flex-shrink-0 flex-grow-0 flex-col border-r border-custom-sidebar-border-200 bg-custom-sidebar-background-100 duration-300
         fixed md:relative
-        ${isSidebarCollapsed ? "-ml-[280px]" : ""}
-        sm:${isSidebarCollapsed ? "-ml-[280px]" : ""}
-        md:ml-0 ${isSidebarCollapsed ? "w-[70px]" : "w-[280px]"}
-        lg:ml-0 ${isSidebarCollapsed ? "w-[70px]" : "w-[280px]"}
+        ${isSidebarCollapsed ? "-ml-[290px]" : ""}
+        sm:${isSidebarCollapsed ? "-ml-[290px]" : ""}
+        md:ml-0 ${isSidebarCollapsed ? "w-[70px]" : "w-[290px]"}
+        lg:ml-0 ${isSidebarCollapsed ? "w-[70px]" : "w-[290px]"}
         `}
       >
         <div ref={ref} className="flex h-full w-full flex-1 flex-col">

@@ -11,10 +11,11 @@ type Props = {
   config: JSX.Element;
   disabled?: boolean;
   withBorder?: boolean;
+  unavailable?: boolean;
 };

 export const AuthenticationMethodCard: FC<Props> = (props) => {
-  const { name, description, icon, config, disabled = false, withBorder = true } = props;
+  const { name, description, icon, config, disabled = false, withBorder = true, unavailable = false } = props;

   return (
     <div
@@ -22,7 +23,11 @@ export const AuthenticationMethodCard: FC<Props> = (props) => {
         "px-4 py-3 border border-custom-border-200": withBorder,
       })}
     >
-      <div className="flex grow items-center gap-4">
+      <div
+        className={cn("flex grow items-center gap-4", {
+          "opacity-50": unavailable,
+        })}
+      >
         <div className="shrink-0">
           <div className="flex h-10 w-10 items-center justify-center rounded-full bg-custom-background-80">{icon}</div>
         </div>

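Note: a minimal usage sketch of the updated card, assuming the imports introduced elsewhere in this diff; the OIDC copy and the placeholder icon are illustrative only:

import { AuthenticationMethodCard } from "@/components/authentication";
import { UpgradeButton } from "@/components/common/upgrade-button";

// Gated methods render the row at 50% opacity and show the upgrade CTA
// in place of a configuration toggle.
const OIDCMethodCard = () => (
  <AuthenticationMethodCard
    name="OIDC"
    description="Authenticate your users via the OpenID Connect protocol."
    icon={<span>OIDC</span>} // placeholder; the real entry passes a logo <Image />
    config={<UpgradeButton />}
    unavailable
  />
);
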
admin/core/components/common/code-block.tsx (new file, 21 lines)
@@ -0,0 +1,21 @@
+import { cn } from "@/helpers/common.helper";
+
+type TProps = {
+  children: React.ReactNode;
+  className?: string;
+  darkerShade?: boolean;
+};
+
+export const CodeBlock = ({ children, className, darkerShade }: TProps) => (
+  <span
+    className={cn(
+      "px-0.5 text-xs text-custom-text-300 bg-custom-background-90 font-semibold rounded-md border border-custom-border-100",
+      {
+        "text-custom-text-200 bg-custom-background-80 border-custom-border-200": darkerShade,
+      },
+      className
+    )}
+  >
+    {children}
+  </span>
+);

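Note: a usage sketch for the new CodeBlock component; the surrounding copy and the EMAIL_HOST name are illustrative, not taken from this diff:

import { CodeBlock } from "@/components/common";

// Renders an inline, code-styled token inside regular helper text.
const SmtpHint = () => (
  <p className="text-sm text-custom-text-300">
    Set <CodeBlock darkerShade>EMAIL_HOST</CodeBlock> before enabling this method.
  </p>
);
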
@@ -38,7 +38,7 @@ export const ControllerInput: React.FC<Props> = (props) => {
   return (
     <div className="flex flex-col gap-1">
       <h4 className="text-sm text-custom-text-300">
-        {label} {!required && "(optional)"}
+        {label}
       </h4>
       <div className="relative">
         <Controller
@@ -80,7 +80,7 @@ export const ControllerInput: React.FC<Props> = (props) => {
           </button>
         ))}
       </div>
-      {description && <p className="text-xs text-custom-text-300">{description}</p>}
+      {description && <p className="pt-0.5 text-xs text-custom-text-300">{description}</p>}
     </div>
   );
 };

@@ -7,3 +7,5 @@ export * from "./banner";
 export * from "./empty-state";
 export * from "./logo-spinner";
 export * from "./page-header";
+export * from "./code-block";
+export * from "./upgrade-button";

admin/core/components/common/upgrade-button.tsx (new file, 16 lines)
@@ -0,0 +1,16 @@
+"use client";
+
+import React from "react";
+// icons
+import { SquareArrowOutUpRight } from "lucide-react";
+// ui
+import { getButtonStyling } from "@plane/ui";
+// helpers
+import { cn } from "@/helpers/common.helper";
+
+export const UpgradeButton: React.FC = () => (
+  <a href="https://plane.so/one" target="_blank" className={cn(getButtonStyling("primary", "sm"))}>
+    Available on One
+    <SquareArrowOutUpRight className="h-3.5 w-3.5 p-0.5" />
+  </a>
+);

@@ -1,7 +1,24 @@
 import { ReactNode } from "react";
+import Image from "next/image";
 import Link from "next/link";
+import { KeyRound, Mails } from "lucide-react";
+// types
+import { TGetBaseAuthenticationModeProps, TInstanceAuthenticationModes } from "@plane/types";
+// components
+import {
+  EmailCodesConfiguration,
+  GithubConfiguration,
+  GitlabConfiguration,
+  GoogleConfiguration,
+  PasswordLoginConfiguration,
+} from "@/components/authentication";
 // helpers
-import { SUPPORT_EMAIL } from "./common.helper";
+import { SUPPORT_EMAIL, resolveGeneralTheme } from "@/helpers/common.helper";
+// images
+import githubLightModeImage from "@/public/logos/github-black.png";
+import githubDarkModeImage from "@/public/logos/github-white.png";
+import GitlabLogo from "@/public/logos/gitlab-logo.svg";
+import GoogleLogo from "@/public/logos/google-logo.svg";

 export enum EPageTypes {
   PUBLIC = "PUBLIC",
@@ -134,3 +151,53 @@ export const authErrorHandler = (
   return undefined;
 };
+
+export const getBaseAuthenticationModes: (props: TGetBaseAuthenticationModeProps) => TInstanceAuthenticationModes[] = ({
+  disabled,
+  updateConfig,
+  resolvedTheme,
+}) => [
+  {
+    key: "unique-codes",
+    name: "Unique codes",
+    description:
+      "Log in or sign up for Plane using codes sent via email. You need to have set up SMTP to use this method.",
+    icon: <Mails className="h-6 w-6 p-0.5 text-custom-text-300/80" />,
+    config: <EmailCodesConfiguration disabled={disabled} updateConfig={updateConfig} />,
+  },
+  {
+    key: "passwords-login",
+    name: "Passwords",
+    description: "Allow members to create accounts with passwords and use it with their email addresses to sign in.",
+    icon: <KeyRound className="h-6 w-6 p-0.5 text-custom-text-300/80" />,
+    config: <PasswordLoginConfiguration disabled={disabled} updateConfig={updateConfig} />,
+  },
+  {
+    key: "google",
+    name: "Google",
+    description: "Allow members to log in or sign up for Plane with their Google accounts.",
+    icon: <Image src={GoogleLogo} height={20} width={20} alt="Google Logo" />,
+    config: <GoogleConfiguration disabled={disabled} updateConfig={updateConfig} />,
+  },
+  {
+    key: "github",
+    name: "GitHub",
+    description: "Allow members to log in or sign up for Plane with their GitHub accounts.",
+    icon: (
+      <Image
+        src={resolveGeneralTheme(resolvedTheme) === "dark" ? githubDarkModeImage : githubLightModeImage}
+        height={20}
+        width={20}
+        alt="GitHub Logo"
+      />
+    ),
+    config: <GithubConfiguration disabled={disabled} updateConfig={updateConfig} />,
+  },
+  {
+    key: "gitlab",
+    name: "GitLab",
+    description: "Allow members to log in or sign up to plane with their GitLab accounts.",
+    icon: <Image src={GitlabLogo} height={20} width={20} alt="GitLab Logo" />,
+    config: <GitlabConfiguration disabled={disabled} updateConfig={updateConfig} />,
+  },
+];

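Note: a sketch of how a caller supplies resolvedTheme to this helper, assuming the next-themes hook used elsewhere in this diff; the component name is illustrative:

import { useTheme } from "next-themes";
import { TInstanceAuthenticationMethodKeys } from "@plane/types";
import { getBaseAuthenticationModes } from "@/helpers/authentication.helper";

// Illustrative component: lists the display names of the base modes.
const BaseModeNames = (props: {
  disabled: boolean;
  updateConfig: (key: TInstanceAuthenticationMethodKeys, value: string) => void;
}) => {
  // next-themes resolves "system" to the active "light" or "dark" value,
  // which the helper uses to pick the matching GitHub logo.
  const { resolvedTheme } = useTheme();
  const modes = getBaseAuthenticationModes({ ...props, resolvedTheme });
  return (
    <ul>
      {modes.map((mode) => (
        <li key={mode.key}>{mode.name}</li>
      ))}
    </ul>
  );
};
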
@@ -1,6 +1,6 @@
 {
   "name": "admin",
-  "version": "0.21.0",
+  "version": "0.22.0",
   "private": true,
   "scripts": {
     "dev": "turbo run develop",
@@ -14,6 +14,7 @@
     "@headlessui/react": "^1.7.19",
     "@plane/types": "*",
     "@plane/ui": "*",
+    "@plane/constants": "*",
     "@tailwindcss/typography": "^0.5.9",
     "@types/lodash": "^4.17.0",
     "autoprefixer": "10.4.14",
@@ -46,4 +47,4 @@
     "tsconfig": "*",
     "typescript": "^5.4.2"
   }
-}
+}

admin/public/logos/oidc-logo.svg (new file, 11 lines, 702 B)
@@ -0,0 +1,11 @@
+<svg width="92" height="84" viewBox="0 0 92 84" fill="none" xmlns="http://www.w3.org/2000/svg">
+<g clip-path="url(#clip0_3695_11896)">
+<path d="M83.0398 32.6876C74.2901 27.2397 62.0735 23.8553 48.7013 23.8553C21.7918 23.8553 0 37.3101 0 53.9016C0 69.0898 18.1598 81.554 41.685 83.7001V74.9504C25.8364 72.9693 13.95 64.3022 13.95 53.9016C13.95 42.0977 29.4684 32.44 48.7013 32.44C58.2765 32.44 66.9436 34.8338 73.217 38.7134L64.3022 44.2439H92.1197V27.0746L83.0398 32.6876Z" fill="#CCCCCC"/>
+<path d="M41.6846 8.99736V74.9504V83.7002L55.6346 74.9504V0L41.6846 8.99736Z" fill="#FF6200"/>
+</g>
+<defs>
+<clipPath id="clip0_3695_11896">
+<rect width="92" height="84" fill="white"/>
+</clipPath>
+</defs>
+</svg>

admin/public/logos/saml-logo.svg (new file, 17 lines, 2.0 KiB)
@@ -0,0 +1,17 @@
+<svg width="700" height="650" viewBox="0 0 700 650" fill="none" xmlns="http://www.w3.org/2000/svg">
+<g clip-path="url(#clip0_3262_5767)">
+<mask id="mask0_3262_5767" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="0" y="0" width="700" height="650">
+<path d="M700 0H0V650H700V0Z" fill="white"/>
+</mask>
+<g mask="url(#mask0_3262_5767)">
+<path d="M337.682 0L360.832 20.5C377.982 35.7 395.182 50.85 412.132 66.25C521.982 166 614.982 278.25 684.982 407.45C688.582 414.05 691.832 420.85 695.082 427.6L699.982 437.75L694.582 440.6L690.532 434.85L680.032 419.9L672.682 409.2C621.732 335.25 570.682 261.2 500.582 201.95C479.373 183.995 455.969 168.807 430.932 156.75C380.232 132.5 335.132 142.2 296.432 182C259.632 219.85 240.532 266.85 223.282 314.65C221.032 320.8 218.682 326.9 216.332 333L212.232 343.75L203.632 341C208.632 323.6 213.232 306.1 217.832 288.55C228.332 248.8 238.832 209.05 253.432 170.75C268.932 129.95 288.532 90.6 308.082 51.25C316.532 34.2 324.982 17.15 333.082 0H337.682Z" fill="#C22E33"/>
+<path d="M372.382 491.1C291.082 529.6 94.3829 569.3 1.08287 559.1C-14.1671 478.8 135.482 102.5 208.982 45.5L204.232 56.4C202.115 61.531 199.813 66.5842 197.332 71.55L194.032 78C156.032 151.1 118.082 224.3 98.6329 304.5C91.6287 332.124 87.8038 360.458 87.2328 388.95C86.7328 455.95 128.432 501.55 198.082 504.4C231.582 505.75 265.432 502.25 299.232 498.7C313.932 497.2 328.582 495.65 343.232 494.5C348.632 494.1 353.932 493.45 360.832 492.55L372.382 491.15V491.1Z" fill="#C22E33"/>
+<path d="M141.233 639.05C118.983 640.75 96.733 642.45 74.583 644.45C279.433 663.95 476.083 630.6 670.083 562.25C606.833 450.75 521.583 362.7 422.483 286.15C423.783 291.05 426.683 294.6 429.533 298.1L431.933 301.1C440.433 312.4 449.333 323.5 458.283 334.6C478.733 360.05 499.183 385.5 514.583 413.5C553.483 484.5 532.383 545.9 456.183 578.3C406.083 599.65 351.333 614.2 297.183 622.9C245.683 631.1 193.433 635.05 141.233 639.05Z" fill="#C22E33"/>
+</g>
+</g>
+<defs>
+<clipPath id="clip0_3262_5767">
+<rect width="700" height="650" fill="white"/>
+</clipPath>
+</defs>
+</svg>

@@ -1,3 +1,5 @@
+ARG BASE_TAG=develop
+ARG BUILD_TYPE=full
 # *****************************************************************************
 # STAGE 1: Build the project
 # *****************************************************************************
@@ -5,7 +7,6 @@ FROM node:18-alpine AS builder
 RUN apk add --no-cache libc6-compat
 # Set working directory
 WORKDIR /app
-ENV NEXT_PUBLIC_API_BASE_URL=http://NEXT_PUBLIC_API_BASE_URL_PLACEHOLDER

 RUN yarn global add turbo
 COPY . .
@@ -46,16 +47,18 @@ ENV NEXT_PUBLIC_SPACE_BASE_URL=$NEXT_PUBLIC_SPACE_BASE_URL
 ARG NEXT_PUBLIC_SPACE_BASE_PATH="/spaces"
 ENV NEXT_PUBLIC_SPACE_BASE_PATH=$NEXT_PUBLIC_SPACE_BASE_PATH

-ENV NEXT_TELEMETRY_DISABLED 1
-ENV TURBO_TELEMETRY_DISABLED 1
 ARG NEXT_PUBLIC_WEB_BASE_URL=""
 ENV NEXT_PUBLIC_WEB_BASE_URL=$NEXT_PUBLIC_WEB_BASE_URL

-RUN yarn turbo run build
+ENV NEXT_TELEMETRY_DISABLED=1
+ENV TURBO_TELEMETRY_DISABLED=1
+
+RUN yarn turbo run build --filter=web --filter=space --filter=admin

 # *****************************************************************************
 # STAGE 3: Copy the project and start it
 # *****************************************************************************
-# FROM makeplane/plane-aio-base AS runner
-FROM makeplane/plane-aio-base:develop AS runner
+FROM makeplane/plane-aio-base:${BUILD_TYPE}-${BASE_TAG} AS runner

 WORKDIR /app
@@ -63,17 +66,14 @@ SHELL [ "/bin/bash", "-c" ]

 # PYTHON APPLICATION SETUP

-ENV PYTHONDONTWRITEBYTECODE 1
-ENV PYTHONUNBUFFERED 1
+ENV PYTHONDONTWRITEBYTECODE=1
+ENV PYTHONUNBUFFERED=1
 ENV PIP_DISABLE_PIP_VERSION_CHECK=1

 COPY apiserver/requirements.txt ./api/
 COPY apiserver/requirements ./api/requirements

-RUN python3.12 -m venv /app/venv && \
-    source /app/venv/bin/activate && \
-    /app/venv/bin/pip install --upgrade pip && \
-    /app/venv/bin/pip install -r ./api/requirements.txt --compile --no-cache-dir
+RUN pip install -r ./api/requirements.txt --compile --no-cache-dir

 # Add in Django deps and generate Django's static files
 COPY apiserver/manage.py ./api/manage.py
@@ -87,7 +87,6 @@ RUN chmod +x ./api/bin/*
 RUN chmod -R 777 ./api/

 # NEXTJS BUILDS
-
 COPY --from=installer /app/web/next.config.js ./web/
 COPY --from=installer /app/web/package.json ./web/
 COPY --from=installer /app/web/.next/standalone ./web
@@ -124,26 +123,63 @@ ENV NEXT_PUBLIC_SPACE_BASE_PATH=$NEXT_PUBLIC_SPACE_BASE_PATH
 ARG NEXT_PUBLIC_WEB_BASE_URL=""
 ENV NEXT_PUBLIC_WEB_BASE_URL=$NEXT_PUBLIC_WEB_BASE_URL

-ENV NEXT_TELEMETRY_DISABLED 1
-ENV TURBO_TELEMETRY_DISABLED 1
+ENV NEXT_TELEMETRY_DISABLED=1
+ENV TURBO_TELEMETRY_DISABLED=1

-COPY aio/supervisord.conf /app/supervisord.conf
+ARG BUILD_TYPE=full
+ENV BUILD_TYPE=$BUILD_TYPE

-COPY aio/aio.sh /app/aio.sh
-RUN chmod +x /app/aio.sh
+COPY aio/supervisord-${BUILD_TYPE}-base /app/supervisord.conf
+COPY aio/supervisord-app /app/supervisord-app
+RUN cat /app/supervisord-app >> /app/supervisord.conf && \
+    rm /app/supervisord-app

-COPY ./aio/nginx.conf /etc/nginx/nginx.conf.template
-
+# if build type is full, run the below copy pg-setup.sh
 COPY aio/postgresql.conf /etc/postgresql/postgresql.conf
 COPY aio/pg-setup.sh /app/pg-setup.sh
 RUN chmod +x /app/pg-setup.sh

 COPY deploy/selfhost/variables.env /app/plane.env
+
+# *****************************************************************************
+# APPLICATION ENVIRONMENT SETTINGS
+# *****************************************************************************
+ENV APP_DOMAIN=localhost
+
+# NGINX Conf Copy
+COPY ./aio/nginx.conf.aio /etc/nginx/nginx.conf.template
+COPY ./nginx/env.sh /app/nginx-start.sh
+RUN chmod +x /app/nginx-start.sh
+
+ENV WEB_URL=http://${APP_DOMAIN}
+ENV DEBUG=0
+ENV SENTRY_DSN=
+ENV SENTRY_ENVIRONMENT=production
+ENV CORS_ALLOWED_ORIGINS=http://${APP_DOMAIN},https://${APP_DOMAIN}
+# Secret Key
+ENV SECRET_KEY=60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5
+# Gunicorn Workers
+ENV GUNICORN_WORKERS=1

-RUN ./pg-setup.sh
+ENV POSTGRES_USER="plane"
+ENV POSTGRES_PASSWORD="plane"
+ENV POSTGRES_DB="plane"
+ENV POSTGRES_HOST="localhost"
+ENV POSTGRES_PORT="5432"
+ENV DATABASE_URL="postgresql://plane:plane@localhost:5432/plane"

 VOLUME [ "/app/data/minio/uploads", "/var/lib/postgresql/data" ]
+
+ENV REDIS_HOST="localhost"
+ENV REDIS_PORT="6379"
+ENV REDIS_URL="redis://localhost:6379"
+
+ENV USE_MINIO="1"
+ENV AWS_REGION=""
+ENV AWS_ACCESS_KEY_ID="access-key"
+ENV AWS_SECRET_ACCESS_KEY="secret-key"
+ENV AWS_S3_ENDPOINT_URL="http://localhost:9000"
+ENV AWS_S3_BUCKET_NAME="uploads"
+ENV MINIO_ROOT_USER="access-key"
+ENV MINIO_ROOT_PASSWORD="secret-key"
+ENV BUCKET_NAME="uploads"
+ENV FILE_SIZE_LIMIT="5242880"
+
+# *****************************************************************************
+
+RUN /app/pg-setup.sh

 CMD ["/usr/bin/supervisord", "-c", "/app/supervisord.conf"]

@@ -1,19 +1,28 @@
-FROM --platform=$BUILDPLATFORM tonistiigi/binfmt as binfmt
+FROM --platform=$BUILDPLATFORM tonistiigi/binfmt AS binfmt

-FROM debian:12-slim
+FROM python:3.12-slim

 # Set environment variables to non-interactive for apt
 ENV DEBIAN_FRONTEND=noninteractive
+ENV BUILD_TYPE=full

 SHELL [ "/bin/bash", "-c" ]

+WORKDIR /app
+
+RUN mkdir -p /app/{data,logs} && \
+    mkdir -p /app/data/{redis,pg,minio,nginx} && \
+    mkdir -p /app/logs/{access,error} && \
+    mkdir -p /etc/supervisor/conf.d
+
 # Update the package list and install prerequisites
 RUN apt-get update && \
     apt-get install -y \
     gnupg2 curl ca-certificates lsb-release software-properties-common \
     build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev \
     libsqlite3-dev wget llvm libncurses5-dev libncursesw5-dev xz-utils \
-    tk-dev libffi-dev liblzma-dev supervisor nginx nano vim ncdu
+    tk-dev libffi-dev liblzma-dev supervisor nginx nano vim ncdu \
+    sudo lsof net-tools libpq-dev procps gettext

 # Install Redis 7.2
 RUN echo "deb http://deb.debian.org/debian $(lsb_release -cs)-backports main" > /etc/apt/sources.list.d/backports.list && \
@@ -23,13 +32,15 @@ RUN echo "deb http://deb.debian.org/debian $(lsb_release -cs)-backports main" >
     apt-get install -y redis-server

 # Install PostgreSQL 15
-ENV POSTGRES_VERSION 15
+ENV POSTGRES_VERSION=15
 RUN curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor -o /usr/share/keyrings/pgdg-archive-keyring.gpg && \
     echo "deb [signed-by=/usr/share/keyrings/pgdg-archive-keyring.gpg] http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
     apt-get update && \
     apt-get install -y postgresql-$POSTGRES_VERSION postgresql-client-$POSTGRES_VERSION && \
     mkdir -p /var/lib/postgresql/data && \
     chown -R postgres:postgres /var/lib/postgresql
+COPY postgresql.conf /etc/postgresql/postgresql.conf
+RUN sudo -u postgres /usr/lib/postgresql/$POSTGRES_VERSION/bin/initdb -D /var/lib/postgresql/data

 # Install MinIO
 ARG TARGETARCH
@@ -42,51 +53,21 @@ RUN if [ "$TARGETARCH" = "amd64" ]; then \
     fi && \
     chmod +x /usr/local/bin/minio

-
 # Install Node.js 18
 RUN curl -fsSL https://deb.nodesource.com/setup_18.x | bash - && \
-    apt-get install -y nodejs
-
-# Install Python 3.12 from source
-RUN cd /usr/src && \
-    wget https://www.python.org/ftp/python/3.12.0/Python-3.12.0.tgz && \
-    tar xzf Python-3.12.0.tgz && \
-    cd Python-3.12.0 && \
-    ./configure --enable-optimizations && \
-    make altinstall && \
-    rm -f /usr/src/Python-3.12.0.tgz
-
-RUN python3.12 -m pip install --upgrade pip
-
-RUN echo "alias python=/usr/local/bin/python3.12" >> ~/.bashrc && \
-    echo "alias pip=/usr/local/bin/pip3.12" >> ~/.bashrc
-
-# Clean up
-RUN apt-get clean && \
-    rm -rf /var/lib/apt/lists/* /usr/src/Python-3.12.0
-
-WORKDIR /app
-
-RUN mkdir -p /app/{data,logs} && \
-    mkdir -p /app/data/{redis,pg,minio,nginx} && \
-    mkdir -p /app/logs/{access,error} && \
-    mkdir -p /etc/supervisor/conf.d
+    apt-get install -y nodejs && \
+    python -m pip install --upgrade pip && \
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/*

 # Create Supervisor configuration file
-COPY supervisord.base /app/supervisord.conf
-
-RUN apt-get update && \
-    apt-get install -y sudo lsof net-tools libpq-dev procps gettext && \
-    apt-get clean
-
-RUN sudo -u postgres /usr/lib/postgresql/$POSTGRES_VERSION/bin/initdb -D /var/lib/postgresql/data
-COPY postgresql.conf /etc/postgresql/postgresql.conf
-
-RUN echo "alias python=/usr/local/bin/python3.12" >> ~/.bashrc && \
-    echo "alias pip=/usr/local/bin/pip3.12" >> ~/.bashrc
+COPY supervisord-full-base /app/supervisord.conf
+COPY nginx.conf /etc/nginx/nginx.conf.template
+COPY env.sh /app/nginx-start.sh
+RUN chmod +x /app/nginx-start.sh

 # Expose ports for Redis, PostgreSQL, and MinIO
-EXPOSE 6379 5432 9000 80
+EXPOSE 6379 5432 9000 80 443

 # Start Supervisor
 CMD ["/usr/bin/supervisord", "-c", "/app/supervisord.conf"]

aio/Dockerfile-base-slim (new file, 45 lines)
@@ -0,0 +1,45 @@
+FROM --platform=$BUILDPLATFORM tonistiigi/binfmt AS binfmt
+
+FROM python:3.12-slim
+
+# Set environment variables to non-interactive for apt
+ENV DEBIAN_FRONTEND=noninteractive
+ENV BUILD_TYPE=slim
+
+SHELL [ "/bin/bash", "-c" ]
+
+WORKDIR /app
+
+RUN mkdir -p /app/{data,logs} && \
+    mkdir -p /app/data/{nginx} && \
+    mkdir -p /app/logs/{access,error} && \
+    mkdir -p /etc/supervisor/conf.d
+
+# Update the package list and install prerequisites
+RUN apt-get update && \
+    apt-get install -y \
+    gnupg2 curl ca-certificates lsb-release software-properties-common \
+    build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev \
+    libsqlite3-dev wget llvm libncurses5-dev libncursesw5-dev xz-utils \
+    tk-dev libffi-dev liblzma-dev supervisor nginx nano vim ncdu \
+    sudo lsof net-tools libpq-dev procps gettext
+
+# Install Node.js 18
+RUN curl -fsSL https://deb.nodesource.com/setup_18.x | bash - && \
+    apt-get install -y nodejs
+
+RUN python -m pip install --upgrade pip && \
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/*
+
+# Create Supervisor configuration file
+COPY supervisord-slim-base /app/supervisord.conf
+COPY nginx.conf /etc/nginx/nginx.conf.template
+COPY env.sh /app/nginx-start.sh
+RUN chmod +x /app/nginx-start.sh
+
+# Expose ports for Redis, PostgreSQL, and MinIO
+EXPOSE 80 443
+
+# Start Supervisor
+CMD ["/usr/bin/supervisord", "-c", "/app/supervisord.conf"]

aio/aio.sh (deleted file, 30 lines)
@@ -1,30 +0,0 @@
-#!/bin/bash
-set -e
-
-
-if [ "$1" = 'api' ]; then
-    source /app/venv/bin/activate
-    cd /app/api
-    exec ./bin/docker-entrypoint-api.sh
-elif [ "$1" = 'worker' ]; then
-    source /app/venv/bin/activate
-    cd /app/api
-    exec ./bin/docker-entrypoint-worker.sh
-elif [ "$1" = 'beat' ]; then
-    source /app/venv/bin/activate
-    cd /app/api
-    exec ./bin/docker-entrypoint-beat.sh
-elif [ "$1" = 'migrator' ]; then
-    source /app/venv/bin/activate
-    cd /app/api
-    exec ./bin/docker-entrypoint-migrator.sh
-elif [ "$1" = 'web' ]; then
-    node /app/web/web/server.js
-elif [ "$1" = 'space' ]; then
-    node /app/space/space/server.js
-elif [ "$1" = 'admin' ]; then
-    node /app/admin/admin/server.js
-else
-    echo "Command not found"
-    exit 1
-fi

aio/env.sh (new file, 7 lines)
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+export dollar="$"
+export http_upgrade="http_upgrade"
+export scheme="scheme"
+envsubst < /etc/nginx/nginx.conf.template > /etc/nginx/nginx.conf
+exec nginx -g 'daemon off;'

@@ -37,7 +37,6 @@ http {
         proxy_pass http://localhost:3002/spaces/;
     }

-
     location /god-mode/ {
         proxy_http_version 1.1;
         proxy_set_header Upgrade ${dollar}http_upgrade;

@@ -1,14 +1,14 @@
 #!/bin/bash

+if [ "$BUILD_TYPE" == "full" ]; then
+
 # Variables
 set -o allexport
 source plane.env set
 set +o allexport
 export PGHOST=localhost

-sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/pg_ctl" -D /var/lib/postgresql/data start
-sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/psql" --command "CREATE USER $POSTGRES_USER WITH SUPERUSER PASSWORD '$POSTGRES_PASSWORD';" && \
-sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/createdb" -O "$POSTGRES_USER" "$POSTGRES_DB" && \
-sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/pg_ctl" -D /var/lib/postgresql/data stop
+sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/pg_ctl" -D /var/lib/postgresql/data start
+sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/psql" --command "CREATE USER $POSTGRES_USER WITH SUPERUSER PASSWORD '$POSTGRES_PASSWORD';" && \
+sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/createdb" -O "$POSTGRES_USER" "$POSTGRES_DB" && \
+sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/psql" --command "GRANT ALL PRIVILEGES ON DATABASE $POSTGRES_DB TO $POSTGRES_USER;" && \
+sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/pg_ctl" -D /var/lib/postgresql/data stop
+
+fi

@@ -1,12 +1,815 @@
 # -----------------------------
 # PostgreSQL configuration file
 # -----------------------------
+#
+# This file consists of lines of the form:
+#
+#   name = value
+#
+# (The "=" is optional.) Whitespace may be used. Comments are introduced with
+# "#" anywhere on a line. The complete list of parameter names and allowed
+# values can be found in the PostgreSQL documentation.
+#
+# The commented-out settings shown in this file represent the default values.
+# Re-commenting a setting is NOT sufficient to revert it to the default value;
+# you need to reload the server.
+#
+# This file is read on server startup and when the server receives a SIGHUP
+# signal. If you edit the file on a running system, you have to SIGHUP the
+# server for the changes to take effect, run "pg_ctl reload", or execute
+# "SELECT pg_reload_conf()". Some parameters, which are marked below,
+# require a server shutdown and restart to take effect.
+#
+# Any parameter can also be given as a command-line option to the server, e.g.,
+# "postgres -c log_connections=on". Some parameters can be changed at run time
+# with the "SET" SQL command.
+#
+# Memory units: B = bytes            Time units: us = microseconds
+#               kB = kilobytes                   ms = milliseconds
+#               MB = megabytes                   s = seconds
+#               GB = gigabytes                   min = minutes
+#               TB = terabytes                   h = hours
+#                                                d = days

-# Allow connections from any IP address
-listen_addresses = '*'
-
-# Set the maximum number of connections
-max_connections = 100
+#------------------------------------------------------------------------------
+# FILE LOCATIONS
+#------------------------------------------------------------------------------

-# Set the shared buffers size
-shared_buffers = 128MB
+# The default values of these variables are driven from the -D command-line
+# option or PGDATA environment variable, represented here as ConfigDir.

-# Other custom configurations can be added here
+data_directory = '/var/lib/postgresql/data' # use data in another directory
+        # (change requires restart)
+hba_file = '/etc/postgresql/15/main/pg_hba.conf' # host-based authentication file
+        # (change requires restart)
+ident_file = '/etc/postgresql/15/main/pg_ident.conf' # ident configuration file
+        # (change requires restart)
+
+# If external_pid_file is not explicitly set, no extra PID file is written.
+external_pid_file = '/var/run/postgresql/15-main.pid' # write an extra PID file
+        # (change requires restart)
+
+
+#------------------------------------------------------------------------------
+# CONNECTIONS AND AUTHENTICATION
+#------------------------------------------------------------------------------
+
+# - Connection Settings -
+
+listen_addresses = 'localhost' # what IP address(es) to listen on;
+        # comma-separated list of addresses;
+        # defaults to 'localhost'; use '*' for all
+        # (change requires restart)
+port = 5432 # (change requires restart)
+max_connections = 200 # (change requires restart)
+#superuser_reserved_connections = 3 # (change requires restart)
+unix_socket_directories = '/var/run/postgresql' # comma-separated list of directories
+        # (change requires restart)
+#unix_socket_group = '' # (change requires restart)
+#unix_socket_permissions = 0777 # begin with 0 to use octal notation
+        # (change requires restart)
+#bonjour = off # advertise server via Bonjour
+        # (change requires restart)
+#bonjour_name = '' # defaults to the computer name
+        # (change requires restart)
+
+# - TCP settings -
+# see "man tcp" for details
+
+#tcp_keepalives_idle = 0 # TCP_KEEPIDLE, in seconds;
+        # 0 selects the system default
+#tcp_keepalives_interval = 0 # TCP_KEEPINTVL, in seconds;
+        # 0 selects the system default
+#tcp_keepalives_count = 0 # TCP_KEEPCNT;
+        # 0 selects the system default
+#tcp_user_timeout = 0 # TCP_USER_TIMEOUT, in milliseconds;
+        # 0 selects the system default
+
+#client_connection_check_interval = 0 # time between checks for client
+        # disconnection while running queries;
+        # 0 for never
+
+# - Authentication -
+
+#authentication_timeout = 1min # 1s-600s
+#password_encryption = scram-sha-256 # scram-sha-256 or md5
+#db_user_namespace = off
+
+# GSSAPI using Kerberos
+#krb_server_keyfile = 'FILE:${sysconfdir}/krb5.keytab'
+#krb_caseins_users = off
+
+# - SSL -
+
+ssl = on
+#ssl_ca_file = ''
+ssl_cert_file = '/etc/ssl/certs/ssl-cert-snakeoil.pem'
+#ssl_crl_file = ''
+#ssl_crl_dir = ''
+ssl_key_file = '/etc/ssl/private/ssl-cert-snakeoil.key'
+#ssl_ciphers = 'HIGH:MEDIUM:+3DES:!aNULL' # allowed SSL ciphers
+#ssl_prefer_server_ciphers = on
+#ssl_ecdh_curve = 'prime256v1'
+#ssl_min_protocol_version = 'TLSv1.2'
+#ssl_max_protocol_version = ''
+#ssl_dh_params_file = ''
+#ssl_passphrase_command = ''
+#ssl_passphrase_command_supports_reload = off
+
+
+#------------------------------------------------------------------------------
+# RESOURCE USAGE (except WAL)
+#------------------------------------------------------------------------------
+
+# - Memory -
+
+shared_buffers = 256MB # min 128kB
+        # (change requires restart)
+#huge_pages = try # on, off, or try
+        # (change requires restart)
+#huge_page_size = 0 # zero for system default
+        # (change requires restart)
+#temp_buffers = 8MB # min 800kB
+#max_prepared_transactions = 0 # zero disables the feature
+        # (change requires restart)
+# Caution: it is not advisable to set max_prepared_transactions nonzero unless
+# you actively intend to use prepared transactions.
+#work_mem = 4MB # min 64kB
+#hash_mem_multiplier = 2.0 # 1-1000.0 multiplier on hash table work_mem
+#maintenance_work_mem = 64MB # min 1MB
+#autovacuum_work_mem = -1 # min 1MB, or -1 to use maintenance_work_mem
+#logical_decoding_work_mem = 64MB # min 64kB
+#max_stack_depth = 2MB # min 100kB
+#shared_memory_type = mmap # the default is the first option
+        # supported by the operating system:
+        #   mmap
+        #   sysv
+        #   windows
+        # (change requires restart)
+dynamic_shared_memory_type = posix # the default is usually the first option
+        # supported by the operating system:
+        #   posix
+        #   sysv
+        #   windows
+        #   mmap
+        # (change requires restart)
+#min_dynamic_shared_memory = 0MB # (change requires restart)
+
+# - Disk -
+
+#temp_file_limit = -1 # limits per-process temp file space
+        # in kilobytes, or -1 for no limit
+
+# - Kernel Resources -
+
+#max_files_per_process = 1000 # min 64
+        # (change requires restart)
+
+# - Cost-Based Vacuum Delay -
+
+#vacuum_cost_delay = 0 # 0-100 milliseconds (0 disables)
+#vacuum_cost_page_hit = 1 # 0-10000 credits
+#vacuum_cost_page_miss = 2 # 0-10000 credits
+#vacuum_cost_page_dirty = 20 # 0-10000 credits
+#vacuum_cost_limit = 200 # 1-10000 credits
+
+# - Background Writer -
+
+#bgwriter_delay = 200ms # 10-10000ms between rounds
+#bgwriter_lru_maxpages = 100 # max buffers written/round, 0 disables
+#bgwriter_lru_multiplier = 2.0 # 0-10.0 multiplier on buffers scanned/round
+#bgwriter_flush_after = 512kB # measured in pages, 0 disables
+
+# - Asynchronous Behavior -
+
+#backend_flush_after = 0 # measured in pages, 0 disables
+#effective_io_concurrency = 1 # 1-1000; 0 disables prefetching
+#maintenance_io_concurrency = 10 # 1-1000; 0 disables prefetching
+#max_worker_processes = 8 # (change requires restart)
+#max_parallel_workers_per_gather = 2 # limited by max_parallel_workers
+#max_parallel_maintenance_workers = 2 # limited by max_parallel_workers
+#max_parallel_workers = 8 # number of max_worker_processes that
+        # can be used in parallel operations
+#parallel_leader_participation = on
+#old_snapshot_threshold = -1 # 1min-60d; -1 disables; 0 is immediate
+        # (change requires restart)
+
+
+#------------------------------------------------------------------------------
+# WRITE-AHEAD LOG
+#------------------------------------------------------------------------------
+
+# - Settings -
+
+#wal_level = replica # minimal, replica, or logical
+        # (change requires restart)
+#fsync = on # flush data to disk for crash safety
+        # (turning this off can cause
+        # unrecoverable data corruption)
+#synchronous_commit = on # synchronization level;
+        # off, local, remote_write, remote_apply, or on
+#wal_sync_method = fsync # the default is the first option
+        # supported by the operating system:
+        #   open_datasync
+        #   fdatasync (default on Linux and FreeBSD)
+        #   fsync
+        #   fsync_writethrough
+        #   open_sync
+#full_page_writes = on # recover from partial page writes
+#wal_log_hints = off # also do full page writes of non-critical updates
+        # (change requires restart)
+#wal_compression = off # enables compression of full-page writes;
+        # off, pglz, lz4, zstd, or on
+#wal_init_zero = on # zero-fill new WAL files
+#wal_recycle = on # recycle WAL files
+#wal_buffers = -1 # min 32kB, -1 sets based on shared_buffers
+        # (change requires restart)
+#wal_writer_delay = 200ms # 1-10000 milliseconds
+#wal_writer_flush_after = 1MB # measured in pages, 0 disables
+#wal_skip_threshold = 2MB
+
+#commit_delay = 0 # range 0-100000, in microseconds
+#commit_siblings = 5 # range 1-1000
+
+# - Checkpoints -
+
+#checkpoint_timeout = 5min # range 30s-1d
+#checkpoint_completion_target = 0.9 # checkpoint target duration, 0.0 - 1.0
+#checkpoint_flush_after = 256kB # measured in pages, 0 disables
+#checkpoint_warning = 30s # 0 disables
+max_wal_size = 1GB
+min_wal_size = 80MB
+
+# - Prefetching during recovery -
+
+#recovery_prefetch = try # prefetch pages referenced in the WAL?
+#wal_decode_buffer_size = 512kB # lookahead window used for prefetching
+        # (change requires restart)
+
+# - Archiving -
+
+#archive_mode = off # enables archiving; off, on, or always
+        # (change requires restart)
+#archive_library = '' # library to use to archive a logfile segment
+        # (empty string indicates archive_command should
+        # be used)
+#archive_command = '' # command to use to archive a logfile segment
+        # placeholders: %p = path of file to archive
+        #               %f = file name only
+        # e.g. 'test ! -f /mnt/server/archivedir/%f && cp %p /mnt/server/archivedir/%f'
+#archive_timeout = 0 # force a logfile segment switch after this
+        # number of seconds; 0 disables
+
+# - Archive Recovery -
+
+# These are only used in recovery mode.
+
+#restore_command = '' # command to use to restore an archived logfile segment
+        # placeholders: %p = path of file to restore
+        #               %f = file name only
+        # e.g. 'cp /mnt/server/archivedir/%f %p'
+#archive_cleanup_command = '' # command to execute at every restartpoint
+#recovery_end_command = '' # command to execute at completion of recovery
+
+# - Recovery Target -
+
+# Set these only when performing a targeted recovery.
+
+#recovery_target = '' # 'immediate' to end recovery as soon as a
+        # consistent state is reached
+        # (change requires restart)
+#recovery_target_name = '' # the named restore point to which recovery will proceed
+        # (change requires restart)
+#recovery_target_time = '' # the time stamp up to which recovery will proceed
+        # (change requires restart)
+#recovery_target_xid = '' # the transaction ID up to which recovery will proceed
+        # (change requires restart)
+#recovery_target_lsn = '' # the WAL LSN up to which recovery will proceed
+        # (change requires restart)
+#recovery_target_inclusive = on # Specifies whether to stop:
+        # just after the specified recovery target (on)
+        # just before the recovery target (off)
+        # (change requires restart)
+#recovery_target_timeline = 'latest' # 'current', 'latest', or timeline ID
+        # (change requires restart)
+#recovery_target_action = 'pause' # 'pause', 'promote', 'shutdown'
+        # (change requires restart)
+
+
+#------------------------------------------------------------------------------
+# REPLICATION
+#------------------------------------------------------------------------------
+
+# - Sending Servers -
+
+# Set these on the primary and on any standby that will send replication data.
+
+#max_wal_senders = 10 # max number of walsender processes
+        # (change requires restart)
+#max_replication_slots = 10 # max number of replication slots
+        # (change requires restart)
+#wal_keep_size = 0 # in megabytes; 0 disables
+#max_slot_wal_keep_size = -1 # in megabytes; -1 disables
+#wal_sender_timeout = 60s # in milliseconds; 0 disables
+#track_commit_timestamp = off # collect timestamp of transaction commit
+        # (change requires restart)
+
+# - Primary Server -
+
+# These settings are ignored on a standby server.
+
+#synchronous_standby_names = '' # standby servers that provide sync rep
+        # method to choose sync standbys, number of sync standbys,
+        # and comma-separated list of application_name
+        # from standby(s); '*' = all
+#vacuum_defer_cleanup_age = 0 # number of xacts by which cleanup is delayed
+
+# - Standby Servers -
+
+# These settings are ignored on a primary server.
+
+#primary_conninfo = '' # connection string to sending server
+#primary_slot_name = '' # replication slot on sending server
+#promote_trigger_file = '' # file name whose presence ends recovery
+#hot_standby = on # "off" disallows queries during recovery
+        # (change requires restart)
+#max_standby_archive_delay = 30s # max delay before canceling queries
+        # when reading WAL from archive;
+        # -1 allows indefinite delay
+#max_standby_streaming_delay = 30s # max delay before canceling queries
+        # when reading streaming WAL;
+        # -1 allows indefinite delay
+#wal_receiver_create_temp_slot = off # create temp slot if primary_slot_name
+        # is not set
+#wal_receiver_status_interval = 10s # send replies at least this often
+        # 0 disables
+#hot_standby_feedback = off # send info from standby to prevent
+        # query conflicts
+#wal_receiver_timeout = 60s # time that receiver waits for
+        # communication from primary
+        # in milliseconds; 0 disables
+#wal_retrieve_retry_interval = 5s # time to wait before retrying to
+        # retrieve WAL after a failed attempt
+#recovery_min_apply_delay = 0 # minimum delay for applying changes during recovery
+
+# - Subscribers -
+
+# These settings are ignored on a publisher.
+
+#max_logical_replication_workers = 4 # taken from max_worker_processes
+        # (change requires restart)
+#max_sync_workers_per_subscription = 2 # taken from max_logical_replication_workers
+
+
+#------------------------------------------------------------------------------
+# QUERY TUNING
+#------------------------------------------------------------------------------
+
+# - Planner Method Configuration -
+
+#enable_async_append = on
+#enable_bitmapscan = on
+#enable_gathermerge = on
+#enable_hashagg = on
+#enable_hashjoin = on
+#enable_incremental_sort = on
+#enable_indexscan = on
+#enable_indexonlyscan = on
+#enable_material = on
+#enable_memoize = on
+#enable_mergejoin = on
+#enable_nestloop = on
+#enable_parallel_append = on
+#enable_parallel_hash = on
+#enable_partition_pruning = on
+#enable_partitionwise_join = off
+#enable_partitionwise_aggregate = off
+#enable_seqscan = on
+#enable_sort = on
+#enable_tidscan = on
+
+# - Planner Cost Constants -
+
+#seq_page_cost = 1.0 # measured on an arbitrary scale
+#random_page_cost = 4.0 # same scale as above
+#cpu_tuple_cost = 0.01 # same scale as above
+#cpu_index_tuple_cost = 0.005 # same scale as above
+#cpu_operator_cost = 0.0025 # same scale as above
+#parallel_setup_cost = 1000.0 # same scale as above
+#parallel_tuple_cost = 0.1 # same scale as above
+#min_parallel_table_scan_size = 8MB
+#min_parallel_index_scan_size = 512kB
+#effective_cache_size = 4GB
+
+#jit_above_cost = 100000 # perform JIT compilation if available
+        # and query more expensive than this;
+        # -1 disables
+#jit_inline_above_cost = 500000 # inline small functions if query is
+        # more expensive than this; -1 disables
+#jit_optimize_above_cost = 500000 # use expensive JIT optimizations if
+        # query is more expensive than this;
+        # -1 disables
+
+# - Genetic Query Optimizer -
+
+#geqo = on
+#geqo_threshold = 12
+#geqo_effort = 5 # range 1-10
+#geqo_pool_size = 0 # selects default based on effort
+#geqo_generations = 0 # selects default based on effort
+#geqo_selection_bias = 2.0 # range 1.5-2.0
+#geqo_seed = 0.0 # range 0.0-1.0
+
+# - Other Planner Options -
+
+#default_statistics_target = 100 # range 1-10000
+#constraint_exclusion = partition # on, off, or partition
+#cursor_tuple_fraction = 0.1 # range 0.0-1.0
+#from_collapse_limit = 8
+#jit = on # allow JIT compilation
+#join_collapse_limit = 8 # 1 disables collapsing of explicit
+        # JOIN clauses
+#plan_cache_mode = auto # auto, force_generic_plan or
+        # force_custom_plan
+#recursive_worktable_factor = 10.0 # range 0.001-1000000
+
+
+#------------------------------------------------------------------------------
+# REPORTING AND LOGGING
+#------------------------------------------------------------------------------
+
+# - Where to Log -
+
+#log_destination = 'stderr' # Valid values are combinations of
+        # stderr, csvlog, jsonlog, syslog, and
+        # eventlog, depending on platform.
+        # csvlog and jsonlog require
+        # logging_collector to be on.
+
+# This is used when logging to stderr:
+#logging_collector = off # Enable capturing of stderr, jsonlog,
+        # and csvlog into log files. Required
+        # to be on for csvlogs and jsonlogs.
+        # (change requires restart)
+
+# These are only used if logging_collector is on:
+#log_directory = 'log' # directory where log files are written,
+        # can be absolute or relative to PGDATA
+#log_filename = 'postgresql-%Y-%m-%d_%H%M%S.log' # log file name pattern,
+        # can include strftime() escapes
+#log_file_mode = 0600 # creation mode for log files,
+        # begin with 0 to use octal notation
+#log_rotation_age = 1d # Automatic rotation of logfiles will
+        # happen after that time. 0 disables.
+#log_rotation_size = 10MB # Automatic rotation of logfiles will
+        # happen after that much log output.
+        # 0 disables.
+#log_truncate_on_rotation = off # If on, an existing log file with the
+        # same name as the new log file will be
+        # truncated rather than appended to.
+        # But such truncation only occurs on
+        # time-driven rotation, not on restarts
+        # or size-driven rotation. Default is
+        # off, meaning append to existing files
+        # in all cases.
+
+# These are relevant when logging to syslog:
+#syslog_facility = 'LOCAL0'
+#syslog_ident = 'postgres'
+#syslog_sequence_numbers = on
+#syslog_split_messages = on
+
+# This is only relevant when logging to eventlog (Windows):
+# (change requires restart)
+#event_source = 'PostgreSQL'
+
+# - When to Log -
+
+#log_min_messages = warning # values in order of decreasing detail:
+        #   debug5
+        #   debug4
+        #   debug3
+        #   debug2
+        #   debug1
+        #   info
+        #   notice
+        #   warning
+        #   error
+        #   log
+        #   fatal
+        #   panic
+
+#log_min_error_statement = error # values in order of decreasing detail:
+        #   debug5
+        #   debug4
+        #   debug3
+        #   debug2
+        #   debug1
+        #   info
+        #   notice
+        #   warning
+        #   error
+        #   log
+        #   fatal
+        #   panic (effectively off)
+
+#log_min_duration_statement = -1 # -1 is disabled, 0 logs all statements
+        # and their durations, > 0 logs only
+        # statements running at least this number
+        # of milliseconds
+
+#log_min_duration_sample = -1 # -1 is disabled, 0 logs a sample of statements
+        # and their durations, > 0 logs only a sample of
+        # statements running at least this number
+        # of milliseconds;
+        # sample fraction is determined by log_statement_sample_rate
+
+#log_statement_sample_rate = 1.0 # fraction of logged statements exceeding
+        # log_min_duration_sample to be logged;
+        # 1.0 logs all such statements, 0.0 never logs
+
+
+#log_transaction_sample_rate = 0.0 # fraction of transactions whose statements
+        # are logged regardless of their duration; 1.0 logs all
+        # statements from all transactions, 0.0 never logs
+
+#log_startup_progress_interval = 10s # Time between progress updates for
+        # long-running startup operations.
+        # 0 disables the feature, > 0 indicates
+        # the interval in milliseconds.
+
+# - What to Log -
+
+#debug_print_parse = off
+#debug_print_rewritten = off
+#debug_print_plan = off
+#debug_pretty_print = on
+#log_autovacuum_min_duration = 10min # log autovacuum activity;
+        # -1 disables, 0 logs all actions and
+        # their durations, > 0 logs only
+        # actions running at least this number
+        # of milliseconds.
+#log_checkpoints = on
+#log_connections = off
+#log_disconnections = off
+#log_duration = off
+#log_error_verbosity = default # terse, default, or verbose messages
+#log_hostname = off
+log_line_prefix = '%m [%p] %q%u@%d ' # special values:
+        #   %a = application name
+        #   %u = user name
+        #   %d = database name
+        #   %r = remote host and port
+        #   %h = remote host
+        #   %b = backend type
+        #   %p = process ID
+        #   %P = process ID of parallel group leader
+        #   %t = timestamp without milliseconds
+        #   %m = timestamp with milliseconds
+        #   %n = timestamp with milliseconds (as a Unix epoch)
+        #   %Q = query ID (0 if none or not computed)
+        #   %i = command tag
+        #   %e = SQL state
+        #   %c = session ID
+        #   %l = session line number
+        #   %s = session start timestamp
+        #   %v = virtual transaction ID
+        #   %x = transaction ID (0 if none)
+        #   %q = stop here in non-session
+        #        processes
+        #   %% = '%'
+        # e.g. '<%u%%%d> '
+#log_lock_waits = off # log lock waits >= deadlock_timeout
+#log_recovery_conflict_waits = off # log standby recovery conflict waits
+        # >= deadlock_timeout
+#log_parameter_max_length = -1 # when logging statements, limit logged
+        # bind-parameter values to N bytes;
+        # -1 means print in full, 0 disables
+#log_parameter_max_length_on_error = 0 # when logging an error, limit logged
+        # bind-parameter values to N bytes;
+        # -1 means print in full, 0 disables
+#log_statement = 'none' # none, ddl, mod, all
+#log_replication_commands = off
+#log_temp_files = -1 # log temporary files equal or larger
+        # than the specified size in kilobytes;
+        # -1 disables, 0 logs all temp files
+log_timezone = 'Etc/UTC'
+
+
+#------------------------------------------------------------------------------
+# PROCESS TITLE
+#------------------------------------------------------------------------------
+
+cluster_name = '15/main' # added to process titles if nonempty
+        # (change requires restart)
+#update_process_title = on
+
+
+#------------------------------------------------------------------------------
+# STATISTICS
+#------------------------------------------------------------------------------
+
+# - Cumulative Query and Index Statistics -
+
+#track_activities = on
+#track_activity_query_size = 1024 # (change requires restart)
+#track_counts = on
+#track_io_timing = off
+#track_wal_io_timing = off
+#track_functions = none # none, pl, all
+#stats_fetch_consistency = cache
+
+
+# - Monitoring -
+
+#compute_query_id = auto
+#log_statement_stats = off
+#log_parser_stats = off
+#log_planner_stats = off
+#log_executor_stats = off
+
+
+#------------------------------------------------------------------------------
+# AUTOVACUUM
+#------------------------------------------------------------------------------
+
+#autovacuum = on # Enable autovacuum subprocess? 'on'
+        # requires track_counts to also be on.
+#autovacuum_max_workers = 3 # max number of autovacuum subprocesses
+        # (change requires restart)
+#autovacuum_naptime = 1min # time between autovacuum runs
+#autovacuum_vacuum_threshold = 50 # min number of row updates before
+        # vacuum
+#autovacuum_vacuum_insert_threshold = 1000 # min number of row inserts
+        # before vacuum; -1 disables insert
+        # vacuums
+#autovacuum_analyze_threshold = 50 # min number of row updates before
+        # analyze
+#autovacuum_vacuum_scale_factor = 0.2 # fraction of table size before vacuum
+#autovacuum_vacuum_insert_scale_factor = 0.2 # fraction of inserts over table
+        # size before insert vacuum
+#autovacuum_analyze_scale_factor = 0.1 # fraction of table size before analyze
+#autovacuum_freeze_max_age = 200000000 # maximum XID age before forced vacuum
+        # (change requires restart)
+#autovacuum_multixact_freeze_max_age = 400000000 # maximum multixact age
+        # before forced vacuum
+        # (change requires restart)
+#autovacuum_vacuum_cost_delay = 2ms # default vacuum cost delay for
+        # autovacuum, in milliseconds;
|
||||
# -1 means use vacuum_cost_delay
|
||||
#autovacuum_vacuum_cost_limit = -1 # default vacuum cost limit for
|
||||
# autovacuum, -1 means use
|
||||
# vacuum_cost_limit
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# CLIENT CONNECTION DEFAULTS
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
# - Statement Behavior -
|
||||
|
||||
#client_min_messages = notice # values in order of decreasing detail:
|
||||
# debug5
|
||||
# debug4
|
||||
# debug3
|
||||
# debug2
|
||||
# debug1
|
||||
# log
|
||||
# notice
|
||||
# warning
|
||||
# error
|
||||
#search_path = '"$user", public' # schema names
|
||||
#row_security = on
|
||||
#default_table_access_method = 'heap'
|
||||
#default_tablespace = '' # a tablespace name, '' uses the default
|
||||
#default_toast_compression = 'pglz' # 'pglz' or 'lz4'
|
||||
#temp_tablespaces = '' # a list of tablespace names, '' uses
|
||||
# only default tablespace
|
||||
#check_function_bodies = on
|
||||
#default_transaction_isolation = 'read committed'
|
||||
#default_transaction_read_only = off
|
||||
#default_transaction_deferrable = off
|
||||
#session_replication_role = 'origin'
|
||||
#statement_timeout = 0 # in milliseconds, 0 is disabled
|
||||
#lock_timeout = 0 # in milliseconds, 0 is disabled
|
||||
#idle_in_transaction_session_timeout = 0 # in milliseconds, 0 is disabled
|
||||
#idle_session_timeout = 0 # in milliseconds, 0 is disabled
|
||||
#vacuum_freeze_table_age = 150000000
|
||||
#vacuum_freeze_min_age = 50000000
|
||||
#vacuum_failsafe_age = 1600000000
|
||||
#vacuum_multixact_freeze_table_age = 150000000
|
||||
#vacuum_multixact_freeze_min_age = 5000000
|
||||
#vacuum_multixact_failsafe_age = 1600000000
|
||||
#bytea_output = 'hex' # hex, escape
|
||||
#xmlbinary = 'base64'
|
||||
#xmloption = 'content'
|
||||
#gin_pending_list_limit = 4MB
|
||||
|
||||
# - Locale and Formatting -
|
||||
|
||||
datestyle = 'iso, mdy'
|
||||
#intervalstyle = 'postgres'
|
||||
timezone = 'Etc/UTC'
|
||||
#timezone_abbreviations = 'Default' # Select the set of available time zone
|
||||
# abbreviations. Currently, there are
|
||||
# Default
|
||||
# Australia (historical usage)
|
||||
# India
|
||||
# You can create your own file in
|
||||
# share/timezonesets/.
|
||||
#extra_float_digits = 1 # min -15, max 3; any value >0 actually
|
||||
# selects precise output mode
|
||||
#client_encoding = sql_ascii # actually, defaults to database
|
||||
# encoding
|
||||
|
||||
# These settings are initialized by initdb, but they can be changed.
|
||||
lc_messages = 'C.UTF-8' # locale for system error message
|
||||
# strings
|
||||
lc_monetary = 'C.UTF-8' # locale for monetary formatting
|
||||
lc_numeric = 'C.UTF-8' # locale for number formatting
|
||||
lc_time = 'C.UTF-8' # locale for time formatting
|
||||
|
||||
# default configuration for text search
|
||||
default_text_search_config = 'pg_catalog.english'
|
||||
|
||||
# - Shared Library Preloading -
|
||||
|
||||
#local_preload_libraries = ''
|
||||
#session_preload_libraries = ''
|
||||
#shared_preload_libraries = '' # (change requires restart)
|
||||
#jit_provider = 'llvmjit' # JIT library to use
|
||||
|
||||
# - Other Defaults -
|
||||
|
||||
#dynamic_library_path = '$libdir'
|
||||
#extension_destdir = '' # prepend path when loading extensions
|
||||
# and shared objects (added by Debian)
|
||||
#gin_fuzzy_search_limit = 0
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# LOCK MANAGEMENT
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
#deadlock_timeout = 1s
|
||||
#max_locks_per_transaction = 64 # min 10
|
||||
# (change requires restart)
|
||||
#max_pred_locks_per_transaction = 64 # min 10
|
||||
# (change requires restart)
|
||||
#max_pred_locks_per_relation = -2 # negative values mean
|
||||
# (max_pred_locks_per_transaction
|
||||
# / -max_pred_locks_per_relation) - 1
|
||||
#max_pred_locks_per_page = 2 # min 0
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# VERSION AND PLATFORM COMPATIBILITY
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
# - Previous PostgreSQL Versions -
|
||||
|
||||
#array_nulls = on
|
||||
#backslash_quote = safe_encoding # on, off, or safe_encoding
|
||||
#escape_string_warning = on
|
||||
#lo_compat_privileges = off
|
||||
#quote_all_identifiers = off
|
||||
#standard_conforming_strings = on
|
||||
#synchronize_seqscans = on
|
||||
|
||||
# - Other Platforms and Clients -
|
||||
|
||||
#transform_null_equals = off
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# ERROR HANDLING
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
#exit_on_error = off # terminate session on any error?
|
||||
#restart_after_crash = on # reinitialize after backend crash?
|
||||
#data_sync_retry = off # retry or panic on failure to fsync
|
||||
# data?
|
||||
# (change requires restart)
|
||||
#recovery_init_sync_method = fsync # fsync, syncfs (Linux 5.8+)
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# CONFIG FILE INCLUDES
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
# These options allow settings to be loaded from files other than the
|
||||
# default postgresql.conf. Note that these are directives, not variable
|
||||
# assignments, so they can usefully be given more than once.
|
||||
|
||||
# include_dir = 'conf.d' # include files ending in '.conf' from
|
||||
# a directory, e.g., 'conf.d'
|
||||
#include_if_exists = '...' # include file only if it exists
|
||||
#include = '...' # include file
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# CUSTOMIZED OPTIONS
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
# Add settings for extensions here
|
||||
|
||||
aio/supervisord-app (new file, 71 lines)
@@ -0,0 +1,71 @@
[program:web]
command=node /app/web/web/server.js
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
environment=PORT=3001,HOSTNAME=0.0.0.0

[program:space]
command=node /app/space/space/server.js
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
environment=PORT=3002,HOSTNAME=0.0.0.0

[program:admin]
command=node /app/admin/admin/server.js
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
environment=PORT=3003,HOSTNAME=0.0.0.0

[program:migrator]
directory=/app/api
command=sh -c "./bin/docker-entrypoint-migrator.sh"
autostart=true
autorestart=false
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0

[program:api]
directory=/app/api
command=sh -c "./bin/docker-entrypoint-api.sh"
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0

[program:worker]
directory=/app/api
command=sh -c "./bin/docker-entrypoint-worker.sh"
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0

[program:beat]
directory=/app/api
command=sh -c "./bin/docker-entrypoint-beat.sh"
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0

@@ -2,7 +2,7 @@
 user=root
 nodaemon=true
 stderr_logfile=/app/logs/error/supervisor.err.log
-stdout_logfile=/app/logs/access/supervisor.out.log
+stdout_logfile=/app/logs/access/supervisor.log

 [program:redis]
 directory=/app/data/redis
@@ -10,15 +10,15 @@ command=redis-server
 autostart=true
 autorestart=true
 stderr_logfile=/app/logs/error/redis.err.log
-stdout_logfile=/app/logs/access/redis.out.log
+stdout_logfile=/app/logs/access/redis.log

 [program:postgresql]
 user=postgres
-command=/usr/lib/postgresql/15/bin/postgres --config-file=/etc/postgresql/15/main/postgresql.conf
+command=/usr/lib/postgresql/15/bin/postgres --config-file=/etc/postgresql/postgresql.conf
 autostart=true
 autorestart=true
 stderr_logfile=/app/logs/error/postgresql.err.log
-stdout_logfile=/app/logs/access/postgresql.out.log
+stdout_logfile=/app/logs/access/postgresql.log

 [program:minio]
 directory=/app/data/minio
@@ -26,12 +26,13 @@ command=minio server /app/data/minio
 autostart=true
 autorestart=true
 stderr_logfile=/app/logs/error/minio.err.log
-stdout_logfile=/app/logs/access/minio.out.log
+stdout_logfile=/app/logs/access/minio.log

 [program:nginx]
 directory=/app/data/nginx
-command=/usr/sbin/nginx -g 'daemon off;'
+command=/app/nginx-start.sh
 autostart=true
 autorestart=true
 stderr_logfile=/app/logs/error/nginx.err.log
-stdout_logfile=/app/logs/access/nginx.out.log
+stdout_logfile=/app/logs/access/nginx.log

aio/supervisord-slim-base (new file, 14 lines)
@@ -0,0 +1,14 @@
[supervisord]
user=root
nodaemon=true
stderr_logfile=/app/logs/error/supervisor.err.log
stdout_logfile=/app/logs/access/supervisor.log

[program:nginx]
directory=/app/data/nginx
command=/app/nginx-start.sh
autostart=true
autorestart=true
stderr_logfile=/app/logs/error/nginx.err.log
stdout_logfile=/app/logs/access/nginx.log

@@ -1,115 +0,0 @@
|
||||
[supervisord]
|
||||
user=root
|
||||
nodaemon=true
|
||||
priority=1
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes=0
|
||||
stderr_logfile=/dev/stdout
|
||||
stderr_logfile_maxbytes=0
|
||||
|
||||
[program:redis]
|
||||
directory=/app/data/redis
|
||||
command=redis-server
|
||||
autostart=true
|
||||
autorestart=true
|
||||
priority=1
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes=0
|
||||
stderr_logfile=/dev/stdout
|
||||
stderr_logfile_maxbytes=0
|
||||
|
||||
[program:postgresql]
|
||||
user=postgres
|
||||
command=/usr/lib/postgresql/15/bin/postgres -D /var/lib/postgresql/data --config-file=/etc/postgresql/postgresql.conf
|
||||
autostart=true
|
||||
autorestart=true
|
||||
priority=1
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes=0
|
||||
stderr_logfile=/dev/stdout
|
||||
stderr_logfile_maxbytes=0
|
||||
|
||||
[program:minio]
|
||||
directory=/app/data/minio
|
||||
command=minio server /app/data/minio
|
||||
autostart=true
|
||||
autorestart=true
|
||||
priority=1
|
||||
stdout_logfile=/app/logs/access/minio.log
|
||||
stderr_logfile=/app/logs/error/minio.err.log
|
||||
|
||||
[program:nginx]
|
||||
command=/app/nginx-start.sh
|
||||
autostart=true
|
||||
autorestart=true
|
||||
priority=1
|
||||
stdout_logfile=/app/logs/access/nginx.log
|
||||
stderr_logfile=/app/logs/error/nginx.err.log
|
||||
|
||||
|
||||
[program:web]
|
||||
command=/app/aio.sh web
|
||||
autostart=true
|
||||
autorestart=true
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes=0
|
||||
stderr_logfile=/dev/stdout
|
||||
stderr_logfile_maxbytes=0
|
||||
environment=PORT=3001,HOSTNAME=0.0.0.0
|
||||
|
||||
[program:space]
|
||||
command=/app/aio.sh space
|
||||
autostart=true
|
||||
autorestart=true
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes=0
|
||||
stderr_logfile=/dev/stdout
|
||||
stderr_logfile_maxbytes=0
|
||||
environment=PORT=3002,HOSTNAME=0.0.0.0
|
||||
|
||||
[program:admin]
|
||||
command=/app/aio.sh admin
|
||||
autostart=true
|
||||
autorestart=true
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes=0
|
||||
stderr_logfile=/dev/stdout
|
||||
stderr_logfile_maxbytes=0
|
||||
environment=PORT=3003,HOSTNAME=0.0.0.0
|
||||
|
||||
[program:migrator]
|
||||
command=/app/aio.sh migrator
|
||||
autostart=true
|
||||
autorestart=false
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes=0
|
||||
stderr_logfile=/dev/stdout
|
||||
stderr_logfile_maxbytes=0
|
||||
|
||||
[program:api]
|
||||
command=/app/aio.sh api
|
||||
autostart=true
|
||||
autorestart=true
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes=0
|
||||
stderr_logfile=/dev/stdout
|
||||
stderr_logfile_maxbytes=0
|
||||
|
||||
[program:worker]
|
||||
command=/app/aio.sh worker
|
||||
autostart=true
|
||||
autorestart=true
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes=0
|
||||
stderr_logfile=/dev/stdout
|
||||
stderr_logfile_maxbytes=0
|
||||
|
||||
[program:beat]
|
||||
command=/app/aio.sh beat
|
||||
autostart=true
|
||||
autorestart=true
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes=0
|
||||
stderr_logfile=/dev/stdout
|
||||
stderr_logfile_maxbytes=0
|
||||
|
||||
@@ -50,3 +50,6 @@ GUNICORN_WORKERS=2
 ADMIN_BASE_URL=
 SPACE_BASE_URL=
 APP_BASE_URL=
+
+# Hard delete files after days
+HARD_DELETE_AFTER_DAYS=

@@ -1,4 +1,4 @@
 {
   "name": "plane-api",
-  "version": "0.21.0"
+  "version": "0.22.0"
 }

@@ -13,9 +13,9 @@ class CycleSerializer(BaseSerializer):
     started_issues = serializers.IntegerField(read_only=True)
     unstarted_issues = serializers.IntegerField(read_only=True)
     backlog_issues = serializers.IntegerField(read_only=True)
-    total_estimates = serializers.IntegerField(read_only=True)
-    completed_estimates = serializers.IntegerField(read_only=True)
-    started_estimates = serializers.IntegerField(read_only=True)
+    total_estimates = serializers.FloatField(read_only=True)
+    completed_estimates = serializers.FloatField(read_only=True)
+    started_estimates = serializers.FloatField(read_only=True)

     def validate(self, data):
         if (
@@ -40,6 +40,7 @@ class CycleSerializer(BaseSerializer):
             "workspace",
             "project",
             "owned_by",
+            "deleted_at",
         ]

@@ -53,9 +53,7 @@ class IssueSerializer(BaseSerializer):
             "id",
             "workspace",
             "project",
-            "created_by",
-            "updated_by",
             "created_at",
             "updated_at",
         ]
         exclude = [
@@ -131,7 +129,10 @@ class IssueSerializer(BaseSerializer):
         workspace_id = self.context["workspace_id"]
         default_assignee_id = self.context["default_assignee_id"]

-        issue = Issue.objects.create(**validated_data, project_id=project_id)
+        issue = Issue.objects.create(
+            **validated_data,
+            project_id=project_id,
+        )

         # Issue Audit Users
         created_by_id = issue.created_by_id
@@ -312,10 +313,14 @@ class IssueLinkSerializer(BaseSerializer):
         return IssueLink.objects.create(**validated_data)

     def update(self, instance, validated_data):
-        if IssueLink.objects.filter(
-            url=validated_data.get("url"),
-            issue_id=instance.issue_id,
-        ).exclude(pk=instance.id).exists():
+        if (
+            IssueLink.objects.filter(
+                url=validated_data.get("url"),
+                issue_id=instance.issue_id,
+            )
+            .exclude(pk=instance.id)
+            .exists()
+        ):
             raise serializers.ValidationError(
                 {"error": "URL already exists for this Issue"}
             )
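The reformatted query above is an "exclude self, then test existence" uniqueness check. A minimal pure-Python sketch of the same predicate, using hypothetical in-memory records rather than the project's ORM:

from dataclasses import dataclass

@dataclass
class Link:
    id: int
    issue_id: int
    url: str

def url_taken(links, instance, new_url):
    # True if another link on the same issue already uses new_url,
    # excluding the record being updated (mirrors .exclude(pk=instance.id)).
    return any(
        l.issue_id == instance.issue_id and l.url == new_url and l.id != instance.id
        for l in links
    )

links = [Link(1, 7, "https://a"), Link(2, 7, "https://b")]
assert url_taken(links, Link(2, 7, "https://b"), "https://a")
assert not url_taken(links, Link(2, 7, "https://b"), "https://b")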
@@ -332,9 +337,7 @@ class IssueAttachmentSerializer(BaseSerializer):
             "workspace",
             "project",
             "issue",
-            "created_by",
-            "updated_by",
             "created_at",
             "updated_at",
         ]

@@ -427,3 +430,4 @@ class IssueExpandSerializer(BaseSerializer):
             "created_at",
             "updated_at",
         ]
+
@@ -39,6 +39,7 @@ class ModuleSerializer(BaseSerializer):
             "updated_by",
             "created_at",
             "updated_at",
+            "deleted_at",
         ]

     def to_representation(self, instance):
@@ -31,6 +31,7 @@ class ProjectSerializer(BaseSerializer):
             "updated_at",
             "created_by",
             "updated_by",
+            "deleted_at",
         ]

     def validate(self, data):
@@ -4,6 +4,7 @@ from .issue import urlpatterns as issue_patterns
 from .cycle import urlpatterns as cycle_patterns
 from .module import urlpatterns as module_patterns
 from .inbox import urlpatterns as inbox_patterns
+from .member import urlpatterns as member_patterns

 urlpatterns = [
     *project_patterns,
@@ -12,4 +13,5 @@ urlpatterns = [
     *cycle_patterns,
     *module_patterns,
     *inbox_patterns,
+    *member_patterns,
 ]

@@ -7,6 +7,7 @@ from plane.api.views import (
     IssueCommentAPIEndpoint,
     IssueActivityAPIEndpoint,
     WorkspaceIssueAPIEndpoint,
+    IssueAttachmentEndpoint,
 )

 urlpatterns = [
@@ -65,4 +66,9 @@ urlpatterns = [
         IssueActivityAPIEndpoint.as_view(),
         name="activity",
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/",
+        IssueAttachmentEndpoint.as_view(),
+        name="attachment",
+    ),
 ]

apiserver/plane/api/urls/member.py (new file, 13 lines)
@@ -0,0 +1,13 @@
from django.urls import path

from plane.api.views import (
    ProjectMemberAPIEndpoint,
)

urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<str:project_id>/members/",
        ProjectMemberAPIEndpoint.as_view(),
        name="users",
    ),
]
@@ -9,6 +9,7 @@ from .issue import (
     IssueLinkAPIEndpoint,
     IssueCommentAPIEndpoint,
     IssueActivityAPIEndpoint,
+    IssueAttachmentEndpoint,
 )

 from .cycle import (
@@ -24,4 +25,7 @@ from .module import (
     ModuleArchiveUnarchiveAPIEndpoint,
 )

+from .member import ProjectMemberAPIEndpoint
+
 from .inbox import InboxIssueAPIEndpoint

@@ -12,7 +12,7 @@ from django.db.models import (
     OuterRef,
     Q,
     Sum,
-    IntegerField,
+    FloatField,
 )
 from django.db.models.functions import Cast

@@ -34,6 +34,7 @@ from plane.db.models import (
     Project,
     IssueAttachment,
     IssueLink,
+    ProjectMember,
 )
 from plane.utils.analytics_plot import burndown_plot

@@ -363,14 +364,28 @@ class CycleAPIEndpoint(BaseAPIView):
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

     def delete(self, request, slug, project_id, pk):
+        cycle = Cycle.objects.get(
+            workspace__slug=slug, project_id=project_id, pk=pk
+        )
+        if cycle.owned_by_id != request.user.id and (
+            not ProjectMember.objects.filter(
+                workspace__slug=slug,
+                member=request.user,
+                role=20,
+                project_id=project_id,
+                is_active=True,
+            ).exists()
+        ):
+            return Response(
+                {"error": "Only admin or creator can delete the cycle"},
+                status=status.HTTP_403_FORBIDDEN,
+            )
+
         cycle_issues = list(
             CycleIssue.objects.filter(
                 cycle_id=self.kwargs.get("pk")
             ).values_list("issue", flat=True)
         )
-        cycle = Cycle.objects.get(
-            workspace__slug=slug, project_id=project_id, pk=pk
-        )

         issue_activity.delay(
             type="cycle.activity.deleted",
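This "creator or role-20 member" guard is the same predicate the module, issue, and inbox delete handlers below adopt. A compact standalone sketch, assuming role 20 denotes a project admin (the diff itself never names the role):

ADMIN_ROLE = 20  # role value the guard checks; assumed here to mean project admin

def can_delete(actor_id, created_by_id, actor_role):
    # Deletion is allowed if the actor created the object or holds the admin role.
    return actor_id == created_by_id or actor_role == ADMIN_ROLE

assert can_delete("u1", "u1", 10)       # creator may delete
assert can_delete("u2", "u1", 20)       # admin may delete
assert not can_delete("u2", "u1", 10)   # everyone else is rejected with 403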
@@ -389,11 +404,14 @@ class CycleAPIEndpoint(BaseAPIView):
         )
         # Delete the cycle
         cycle.delete()
+        # Delete the cycle issues
+        CycleIssue.objects.filter(
+            cycle_id=self.kwargs.get("pk"),
+        ).delete()
         return Response(status=status.HTTP_204_NO_CONTENT)


 class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):

     permission_classes = [
         ProjectEntityPermission,
     ]
@@ -658,61 +676,63 @@ class CycleIssueAPIEndpoint(BaseAPIView):
                 status=status.HTTP_400_BAD_REQUEST,
             )

         issues = Issue.objects.filter(
             pk__in=issues, workspace__slug=slug, project_id=project_id
         ).values_list("id", flat=True)

-        # Get all CycleIssues already created
-        cycle_issues = list(CycleIssue.objects.filter(issue_id__in=issues))
-        update_cycle_issue_activity = []
-        record_to_create = []
-        records_to_update = []
-
-        for issue in issues:
-            cycle_issue = [
-                cycle_issue
-                for cycle_issue in cycle_issues
-                if str(cycle_issue.issue_id) in issues
-            ]
-            # Update only when cycle changes
-            if len(cycle_issue):
-                if cycle_issue[0].cycle_id != cycle_id:
-                    update_cycle_issue_activity.append(
-                        {
-                            "old_cycle_id": str(cycle_issue[0].cycle_id),
-                            "new_cycle_id": str(cycle_id),
-                            "issue_id": str(cycle_issue[0].issue_id),
-                        }
-                    )
-                    cycle_issue[0].cycle_id = cycle_id
-                    records_to_update.append(cycle_issue[0])
-            else:
-                record_to_create.append(
-                    CycleIssue(
-                        project_id=project_id,
-                        workspace=cycle.workspace,
-                        created_by=request.user,
-                        updated_by=request.user,
-                        cycle=cycle,
-                        issue_id=issue,
-                    )
-                )
-
-        CycleIssue.objects.bulk_create(
-            record_to_create,
-            batch_size=10,
-            ignore_conflicts=True,
+        cycle_issues = list(
+            CycleIssue.objects.filter(
+                ~Q(cycle_id=cycle_id), issue_id__in=issues
+            )
         )
-        CycleIssue.objects.bulk_update(
-            records_to_update,
-            ["cycle"],
+
+        existing_issues = [
+            str(cycle_issue.issue_id)
+            for cycle_issue in cycle_issues
+            if str(cycle_issue.issue_id) in issues
+        ]
+        new_issues = list(set(issues) - set(existing_issues))
+
+        # New issues to create
+        created_records = CycleIssue.objects.bulk_create(
+            [
+                CycleIssue(
+                    project_id=project_id,
+                    workspace_id=cycle.workspace_id,
+                    cycle_id=cycle_id,
+                    issue_id=issue,
+                )
+                for issue in new_issues
+            ],
+            ignore_conflicts=True,
+            batch_size=10,
         )
+
+        # Updated Issues
+        updated_records = []
+        update_cycle_issue_activity = []
+        # Iterate over each cycle_issue in cycle_issues
+        for cycle_issue in cycle_issues:
+            old_cycle_id = cycle_issue.cycle_id
+            # Update the cycle_issue's cycle_id
+            cycle_issue.cycle_id = cycle_id
+            # Add the modified cycle_issue to the records_to_update list
+            updated_records.append(cycle_issue)
+            # Record the update activity
+            update_cycle_issue_activity.append(
+                {
+                    "old_cycle_id": str(old_cycle_id),
+                    "new_cycle_id": str(cycle_id),
+                    "issue_id": str(cycle_issue.issue_id),
+                }
+            )
+
+        # Update the cycle issues
+        CycleIssue.objects.bulk_update(
+            updated_records, ["cycle_id"], batch_size=100
+        )

         # Capture Issue Activity
         issue_activity.delay(
             type="cycle.activity.created",
-            requested_data=json.dumps({"cycles_list": str(issues)}),
+            requested_data=json.dumps({"cycles_list": issues}),
             actor_id=str(self.request.user.id),
             issue_id=None,
             project_id=str(self.kwargs.get("project_id", None)),
@@ -720,13 +740,14 @@ class CycleIssueAPIEndpoint(BaseAPIView):
                 {
                     "updated_cycle_issues": update_cycle_issue_activity,
                     "created_cycle_issues": serializers.serialize(
-                        "json", record_to_create
+                        "json", created_records
                     ),
                 }
             ),
             epoch=int(timezone.now().timestamp()),
             notification=True,
             origin=request.META.get("HTTP_ORIGIN"),
         )

         # Return all Cycle Issues
         return Response(
             CycleIssueSerializer(self.get_queryset(), many=True).data,
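The rewritten handler replaces the per-issue loop with a set-based split: ids already attached to a different cycle are bulk-updated, and the rest are bulk-created with ignore_conflicts. A rough sketch of that partition, assuming a plain dict in place of the CycleIssue rows:

def partition_issues(issue_ids, existing_rows, target_cycle_id):
    # existing_rows: {issue_id: current_cycle_id} for issues already in a cycle.
    to_update = {
        issue_id: cycle_id
        for issue_id, cycle_id in existing_rows.items()
        if issue_id in issue_ids and cycle_id != target_cycle_id
    }
    to_create = [i for i in issue_ids if i not in existing_rows]
    return to_create, to_update

to_create, to_update = partition_issues(
    ["a", "b", "c"], {"b": "cycle-1", "c": "cycle-2"}, "cycle-2"
)
assert to_create == ["a"]              # never in a cycle: bulk_create
assert to_update == {"b": "cycle-1"}   # in another cycle: bulk_update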
@@ -856,7 +877,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):

         if estimate_type:
             assignee_estimate_data = (
-                Issue.objects.filter(
+                Issue.issue_objects.filter(
                     issue_cycle__cycle_id=cycle_id,
                     workspace__slug=slug,
                     project_id=project_id,
@@ -867,12 +888,12 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
                 .values("display_name", "assignee_id", "avatar")
                 .annotate(
                     total_estimates=Sum(
-                        Cast("estimate_point__value", IntegerField())
+                        Cast("estimate_point__value", FloatField())
                     )
                 )
                 .annotate(
                     completed_estimates=Sum(
-                        Cast("estimate_point__value", IntegerField()),
+                        Cast("estimate_point__value", FloatField()),
                         filter=Q(
                             completed_at__isnull=False,
                             archived_at__isnull=True,
@@ -882,7 +903,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
                 )
                 .annotate(
                     pending_estimates=Sum(
-                        Cast("estimate_point__value", IntegerField()),
+                        Cast("estimate_point__value", FloatField()),
                         filter=Q(
                             completed_at__isnull=True,
                             archived_at__isnull=True,
@@ -910,7 +931,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
             ]

             label_distribution_data = (
-                Issue.objects.filter(
+                Issue.issue_objects.filter(
                     issue_cycle__cycle_id=cycle_id,
                     workspace__slug=slug,
                     project_id=project_id,
@@ -921,12 +942,12 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
                 .values("label_name", "color", "label_id")
                 .annotate(
                     total_estimates=Sum(
-                        Cast("estimate_point__value", IntegerField())
+                        Cast("estimate_point__value", FloatField())
                    )
                 )
                 .annotate(
                     completed_estimates=Sum(
-                        Cast("estimate_point__value", IntegerField()),
+                        Cast("estimate_point__value", FloatField()),
                         filter=Q(
                             completed_at__isnull=False,
                             archived_at__isnull=True,
@@ -936,7 +957,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
                 )
                 .annotate(
                     pending_estimates=Sum(
-                        Cast("estimate_point__value", IntegerField()),
+                        Cast("estimate_point__value", FloatField()),
                         filter=Q(
                             completed_at__isnull=True,
                             archived_at__isnull=True,
@@ -971,7 +992,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):

         # Get the assignee distribution
         assignee_distribution = (
-            Issue.objects.filter(
+            Issue.issue_objects.filter(
                 issue_cycle__cycle_id=cycle_id,
                 workspace__slug=slug,
                 project_id=project_id,
@@ -1028,7 +1049,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):

         # Get the label distribution
         label_distribution = (
-            Issue.objects.filter(
+            Issue.issue_objects.filter(
                 issue_cycle__cycle_id=cycle_id,
                 workspace__slug=slug,
                 project_id=project_id,
@@ -1140,12 +1161,38 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
             )

+        updated_cycles = []
+        update_cycle_issue_activity = []
+        for cycle_issue in cycle_issues:
+            cycle_issue.cycle_id = new_cycle_id
+            updated_cycles.append(cycle_issue)
+            update_cycle_issue_activity.append(
+                {
+                    "old_cycle_id": str(cycle_id),
+                    "new_cycle_id": str(new_cycle_id),
+                    "issue_id": str(cycle_issue.issue_id),
+                }
+            )
+
+        cycle_issues = CycleIssue.objects.bulk_update(
+            updated_cycles, ["cycle_id"], batch_size=100
+        )
+
+        # Capture Issue Activity
+        issue_activity.delay(
+            type="cycle.activity.created",
+            requested_data=json.dumps({"cycles_list": []}),
+            actor_id=str(self.request.user.id),
+            issue_id=None,
+            project_id=str(self.kwargs.get("project_id", None)),
+            current_instance=json.dumps(
+                {
+                    "updated_cycle_issues": update_cycle_issue_activity,
+                    "created_cycle_issues": "[]",
+                }
+            ),
+            epoch=int(timezone.now().timestamp()),
+            notification=True,
+            origin=request.META.get("HTTP_ORIGIN"),
+        )

         return Response({"message": "Success"}, status=status.HTTP_200_OK)

@@ -3,8 +3,11 @@ import json

 # Django imports
 from django.core.serializers.json import DjangoJSONEncoder
-from django.db.models import Q
 from django.utils import timezone
+from django.db.models import Q, Value, UUIDField
+from django.db.models.functions import Coalesce
+from django.contrib.postgres.aggregates import ArrayAgg
+from django.contrib.postgres.fields import ArrayField

 # Third party imports
 from rest_framework import status
@@ -149,7 +152,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):
             description_html=request.data.get("issue", {}).get(
                 "description_html", "<p></p>"
             ),
-            priority=request.data.get("issue", {}).get("priority", "low"),
+            priority=request.data.get("issue", {}).get("priority", "none"),
             project_id=project_id,
             state=state,
         )
@@ -224,8 +227,27 @@ class InboxIssueAPIEndpoint(BaseAPIView):
         issue_data = request.data.pop("issue", False)

         if bool(issue_data):
-            issue = Issue.objects.get(
-                pk=issue_id, workspace__slug=slug, project_id=project_id
+            issue = Issue.objects.annotate(
+                label_ids=Coalesce(
+                    ArrayAgg(
+                        "labels__id",
+                        distinct=True,
+                        filter=~Q(labels__id__isnull=True),
+                    ),
+                    Value([], output_field=ArrayField(UUIDField())),
+                ),
+                assignee_ids=Coalesce(
+                    ArrayAgg(
+                        "assignees__id",
+                        distinct=True,
+                        filter=~Q(assignees__id__isnull=True),
+                    ),
+                    Value([], output_field=ArrayField(UUIDField())),
+                ),
+            ).get(
+                pk=issue_id,
+                workspace__slug=slug,
+                project_id=project_id,
             )
             # Only allow guests and viewers to edit name and description
             if project_member.role <= 10:
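The annotation above folds distinct, non-null label and assignee ids into arrays, with Coalesce supplying an empty array when ArrayAgg has nothing to aggregate. A rough pure-Python rendering of that shape (hypothetical row dicts, not the actual ORM call):

def agg_ids(rows, key):
    # Distinct, null-filtered aggregation with an empty-list default --
    # the same shape Coalesce(ArrayAgg(...), Value([])) yields.
    seen, out = set(), []
    for row in rows:
        value = row.get(key)
        if value is not None and value not in seen:
            seen.add(value)
            out.append(value)
    return out

rows = [{"label_id": 1}, {"label_id": None}, {"label_id": 1}, {"label_id": 2}]
assert agg_ids(rows, "label_id") == [1, 2]
assert agg_ids([], "label_id") == []  # no labels still gives [], never None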
@@ -368,29 +390,26 @@ class InboxIssueAPIEndpoint(BaseAPIView):
             inbox_id=inbox.id,
         )

-        # Get the project member
-        project_member = ProjectMember.objects.get(
-            workspace__slug=slug,
-            project_id=project_id,
-            member=request.user,
-            is_active=True,
-        )
-
-        # Check the inbox issue created
-        if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(
-            request.user.id
-        ):
-            return Response(
-                {"error": "You cannot delete inbox issue"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
-
         # Check the issue status
         if inbox_issue.status in [-2, -1, 0, 2]:
             # Delete the issue also
-            Issue.objects.filter(
+            issue = Issue.objects.filter(
                 workspace__slug=slug, project_id=project_id, pk=issue_id
-            ).delete()
+            ).first()
+            if issue.created_by_id != request.user.id and (
+                not ProjectMember.objects.filter(
+                    workspace__slug=slug,
+                    member=request.user,
+                    role=20,
+                    project_id=project_id,
+                    is_active=True,
+                ).exists()
+            ):
+                return Response(
+                    {"error": "Only admin or creator can delete the issue"},
+                    status=status.HTTP_403_FORBIDDEN,
+                )
+            issue.delete()

         inbox_issue.delete()
         return Response(status=status.HTTP_204_NO_CONTENT)
@@ -22,9 +22,11 @@ from django.utils import timezone
 # Third party imports
 from rest_framework import status
 from rest_framework.response import Response
+from rest_framework.parsers import MultiPartParser, FormParser

 # Module imports
 from plane.api.serializers import (
+    IssueAttachmentSerializer,
     IssueActivitySerializer,
     IssueCommentSerializer,
     IssueLinkSerializer,
@@ -149,6 +151,25 @@ class IssueAPIEndpoint(BaseAPIView):
         ).distinct()

     def get(self, request, slug, project_id, pk=None):
+        external_id = request.GET.get("external_id")
+        external_source = request.GET.get("external_source")
+
+        if external_id and external_source:
+            issue = Issue.objects.get(
+                external_id=external_id,
+                external_source=external_source,
+                workspace__slug=slug,
+                project_id=project_id,
+            )
+            return Response(
+                IssueSerializer(
+                    issue,
+                    fields=self.fields,
+                    expand=self.expand,
+                ).data,
+                status=status.HTTP_200_OK,
+            )
+
         if pk:
             issue = Issue.issue_objects.annotate(
                 sub_issues_count=Issue.issue_objects.filter(
@@ -307,6 +328,17 @@ class IssueAPIEndpoint(BaseAPIView):
             )

         serializer.save()
+        # Refetch the issue
+        issue = Issue.objects.filter(
+            workspace__slug=slug,
+            project_id=project_id,
+            pk=serializer.data["id"],
+        ).first()
+        issue.created_at = request.data.get("created_at", timezone.now())
+        issue.created_by_id = request.data.get(
+            "created_by", request.user.id
+        )
+        issue.save(update_fields=["created_at", "created_by"])

         # Track the issue
         issue_activity.delay(
@@ -379,6 +411,19 @@ class IssueAPIEndpoint(BaseAPIView):
         issue = Issue.objects.get(
             workspace__slug=slug, project_id=project_id, pk=pk
         )
+        if issue.created_by_id != request.user.id and (
+            not ProjectMember.objects.filter(
+                workspace__slug=slug,
+                member=request.user,
+                role=20,
+                project_id=project_id,
+                is_active=True,
+            ).exists()
+        ):
+            return Response(
+                {"error": "Only admin or creator can delete the issue"},
+                status=status.HTTP_403_FORBIDDEN,
+            )
         current_instance = json.dumps(
             IssueSerializer(issue).data, cls=DjangoJSONEncoder
         )
@@ -587,14 +632,20 @@ class IssueLinkAPIEndpoint(BaseAPIView):
                 project_id=project_id,
                 issue_id=issue_id,
             )

+            link = IssueLink.objects.get(pk=serializer.data["id"])
+            link.created_by_id = request.data.get(
+                "created_by", request.user.id
+            )
+            link.save(update_fields=["created_by"])
             issue_activity.delay(
                 type="link.activity.created",
                 requested_data=json.dumps(
                     serializer.data, cls=DjangoJSONEncoder
                 ),
-                actor_id=str(self.request.user.id),
+                actor_id=str(link.created_by_id),
                 issue_id=str(self.kwargs.get("issue_id")),
                 project_id=str(self.kwargs.get("project_id")),
                 current_instance=None,
                 epoch=int(timezone.now().timestamp()),
             )
@@ -748,12 +799,24 @@ class IssueCommentAPIEndpoint(BaseAPIView):
                 issue_id=issue_id,
                 actor=request.user,
             )
+            issue_comment = IssueComment.objects.get(
+                pk=serializer.data.get("id")
+            )
+            # Update the created_at and the created_by and save the comment
+            issue_comment.created_at = request.data.get(
+                "created_at", timezone.now()
+            )
+            issue_comment.created_by_id = request.data.get(
+                "created_by", request.user.id
+            )
+            issue_comment.save(update_fields=["created_at", "created_by"])

             issue_activity.delay(
                 type="comment.activity.created",
                 requested_data=json.dumps(
                     serializer.data, cls=DjangoJSONEncoder
                 ),
-                actor_id=str(self.request.user.id),
+                actor_id=str(issue_comment.created_by_id),
                 issue_id=str(self.kwargs.get("issue_id")),
                 project_id=str(self.kwargs.get("project_id")),
                 current_instance=None,
@@ -874,3 +937,83 @@ class IssueActivityAPIEndpoint(BaseAPIView):
                 expand=self.expand,
             ).data,
         )
+
+
+class IssueAttachmentEndpoint(BaseAPIView):
+    serializer_class = IssueAttachmentSerializer
+    permission_classes = [
+        ProjectEntityPermission,
+    ]
+    model = IssueAttachment
+    parser_classes = (MultiPartParser, FormParser)
+
+    def post(self, request, slug, project_id, issue_id):
+        serializer = IssueAttachmentSerializer(data=request.data)
+        if (
+            request.data.get("external_id")
+            and request.data.get("external_source")
+            and IssueAttachment.objects.filter(
+                project_id=project_id,
+                workspace__slug=slug,
+                issue_id=issue_id,
+                external_source=request.data.get("external_source"),
+                external_id=request.data.get("external_id"),
+            ).exists()
+        ):
+            issue_attachment = IssueAttachment.objects.filter(
+                workspace__slug=slug,
+                project_id=project_id,
+                external_id=request.data.get("external_id"),
+                external_source=request.data.get("external_source"),
+            ).first()
+            return Response(
+                {
+                    "error": "Issue attachment with the same external id and external source already exists",
+                    "id": str(issue_attachment.id),
+                },
+                status=status.HTTP_409_CONFLICT,
+            )
+
+        if serializer.is_valid():
+            serializer.save(project_id=project_id, issue_id=issue_id)
+            issue_activity.delay(
+                type="attachment.activity.created",
+                requested_data=None,
+                actor_id=str(self.request.user.id),
+                issue_id=str(self.kwargs.get("issue_id", None)),
+                project_id=str(self.kwargs.get("project_id", None)),
+                current_instance=json.dumps(
+                    serializer.data,
+                    cls=DjangoJSONEncoder,
+                ),
+                epoch=int(timezone.now().timestamp()),
+                notification=True,
+                origin=request.META.get("HTTP_ORIGIN"),
+            )
+            return Response(serializer.data, status=status.HTTP_201_CREATED)
+        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+    def delete(self, request, slug, project_id, issue_id, pk):
+        issue_attachment = IssueAttachment.objects.get(pk=pk)
+        issue_attachment.asset.delete(save=False)
+        issue_attachment.delete()
+        issue_activity.delay(
+            type="attachment.activity.deleted",
+            requested_data=None,
+            actor_id=str(self.request.user.id),
+            issue_id=str(self.kwargs.get("issue_id", None)),
+            project_id=str(self.kwargs.get("project_id", None)),
+            current_instance=None,
+            epoch=int(timezone.now().timestamp()),
+            notification=True,
+            origin=request.META.get("HTTP_ORIGIN"),
+        )
+
+        return Response(status=status.HTTP_204_NO_CONTENT)
+
+    def get(self, request, slug, project_id, issue_id):
+        issue_attachments = IssueAttachment.objects.filter(
+            issue_id=issue_id, workspace__slug=slug, project_id=project_id
+        )
+        serializer = IssueAttachmentSerializer(issue_attachments, many=True)
+        return Response(serializer.data, status=status.HTTP_200_OK)
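The POST handler above treats the (external_source, external_id) pair as an idempotency key and answers 409 with the existing record's id instead of creating a duplicate. A small sketch of that flow, assuming a dict keyed by the pair:

HTTP_201_CREATED = 201  # mirrors rest_framework's status constants
HTTP_409_CONFLICT = 409

def create_attachment(store, external_source, external_id, payload):
    key = (external_source, external_id)
    if key in store:
        # Duplicate external identity: report the existing record instead.
        return HTTP_409_CONFLICT, {"id": store[key]["id"]}
    store[key] = {"id": f"att-{len(store) + 1}", **payload}
    return HTTP_201_CREATED, store[key]

store = {}
assert create_attachment(store, "github", "42", {})[0] == 201
assert create_attachment(store, "github", "42", {})[0] == 409  # retry is safe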
apiserver/plane/api/views/member.py (new file, 153 lines)
@@ -0,0 +1,153 @@
# Python imports
import uuid

# Django imports
from django.contrib.auth.hashers import make_password
from django.core.validators import validate_email
from django.core.exceptions import ValidationError

# Third Party imports
from rest_framework.response import Response
from rest_framework import status

# Module imports
from .base import BaseAPIView
from plane.api.serializers import UserLiteSerializer
from plane.db.models import (
    User,
    Workspace,
    Project,
    WorkspaceMember,
    ProjectMember,
)

from plane.app.permissions import (
    ProjectMemberPermission,
)


# API endpoint to get and insert users inside the workspace
class ProjectMemberAPIEndpoint(BaseAPIView):
    permission_classes = [
        ProjectMemberPermission,
    ]

    # Get all the users that are present inside the workspace
    def get(self, request, slug, project_id):
        # Check if the workspace exists
        if not Workspace.objects.filter(slug=slug).exists():
            return Response(
                {"error": "Provided workspace does not exist"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Get the workspace members that are present inside the workspace
        project_members = ProjectMember.objects.filter(
            project_id=project_id, workspace__slug=slug
        ).values_list("member_id", flat=True)

        # Get all the users that are present inside the workspace
        users = UserLiteSerializer(
            User.objects.filter(
                id__in=project_members,
            ),
            many=True,
        ).data

        return Response(users, status=status.HTTP_200_OK)

    # Insert a new user inside the workspace, and assign the user to the project
    def post(self, request, slug, project_id):
        # Check that the mandatory fields are present, and that the
        # workspace and project are valid, before doing anything else.
        # ------------------- Validation -------------------
        if (
            request.data.get("email") is None
            or request.data.get("display_name") is None
        ):
            return Response(
                {
                    "error": "Expected email, display_name, workspace_slug, project_id, one or more of the fields are missing."
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        email = request.data.get("email")

        try:
            validate_email(email)
        except ValidationError:
            return Response(
                {"error": "Invalid email provided"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        workspace = Workspace.objects.filter(slug=slug).first()
        project = Project.objects.filter(pk=project_id).first()

        if not all([workspace, project]):
            return Response(
                {"error": "Provided workspace or project does not exist"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Check if user exists
        user = User.objects.filter(email=email).first()
        workspace_member = None
        project_member = None

        if user:
            # Check if user is part of the workspace
            workspace_member = WorkspaceMember.objects.filter(
                workspace=workspace, member=user
            ).first()
            if workspace_member:
                # Check if user is part of the project
                project_member = ProjectMember.objects.filter(
                    project=project, member=user
                ).first()
                if project_member:
                    return Response(
                        {
                            "error": "User is already part of the workspace and project"
                        },
                        status=status.HTTP_400_BAD_REQUEST,
                    )

        # If user does not exist, create the user
        if not user:
            user = User.objects.create(
                email=email,
                display_name=request.data.get("display_name"),
                first_name=request.data.get("first_name", ""),
                last_name=request.data.get("last_name", ""),
                username=uuid.uuid4().hex,
                password=make_password(uuid.uuid4().hex),
                is_password_autoset=True,
                is_active=False,
            )
            user.save()

        # Create a workspace member for the user if not already a member
        if not workspace_member:
            workspace_member = WorkspaceMember.objects.create(
                workspace=workspace,
                member=user,
                role=request.data.get("role", 10),
            )
            workspace_member.save()

        # Create a project member for the user if not already a member
        if not project_member:
            project_member = ProjectMember.objects.create(
                project=project,
                member=user,
                role=request.data.get("role", 10),
            )
            project_member.save()

        # Serialize the user and return the response
        user_data = UserLiteSerializer(user).data

        return Response(user_data, status=status.HTTP_201_CREATED)
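A hedged usage sketch for the new members endpoint; the base URL and the API-key header are assumptions for illustration, since the diff only defines the route itself:

import requests  # assumes the `requests` package is installed

BASE = "https://plane.example.com/api/v1"  # hypothetical host and prefix
HEADERS = {"x-api-key": "<token>"}          # assumed auth header, not shown in the diff

# List the members of a project (placeholder slug and project id)
resp = requests.get(
    f"{BASE}/workspaces/my-workspace/projects/<project-id>/members/",
    headers=HEADERS,
)
print(resp.status_code, resp.json())

# Invite (or create) a user and attach them to the workspace and project
resp = requests.post(
    f"{BASE}/workspaces/my-workspace/projects/<project-id>/members/",
    headers=HEADERS,
    json={"email": "dev@example.com", "display_name": "Dev", "role": 10},
)
print(resp.status_code, resp.json())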
@@ -27,6 +27,7 @@ from plane.db.models import (
     ModuleIssue,
     ModuleLink,
     Project,
+    ProjectMember,
 )

 from .base import BaseAPIView
@@ -265,6 +266,20 @@ class ModuleAPIEndpoint(BaseAPIView):
         module = Module.objects.get(
             workspace__slug=slug, project_id=project_id, pk=pk
         )
+        if module.created_by_id != request.user.id and (
+            not ProjectMember.objects.filter(
+                workspace__slug=slug,
+                member=request.user,
+                role=20,
+                project_id=project_id,
+                is_active=True,
+            ).exists()
+        ):
+            return Response(
+                {"error": "Only admin or creator can delete the module"},
+                status=status.HTTP_403_FORBIDDEN,
+            )
+
         module_issues = list(
             ModuleIssue.objects.filter(module_id=pk).values_list(
                 "issue", flat=True
@@ -286,6 +301,10 @@ class ModuleAPIEndpoint(BaseAPIView):
             epoch=int(timezone.now().timestamp()),
         )
         module.delete()
+        # Delete the module issues
+        ModuleIssue.objects.filter(
+            module=pk,
+        ).delete()
         return Response(status=status.HTTP_204_NO_CONTENT)

@@ -19,7 +19,7 @@ from plane.app.permissions import ProjectBasePermission
 from plane.db.models import (
     Cycle,
     Inbox,
-    IssueProperty,
+    IssueUserProperty,
     Module,
     Project,
     DeployBoard,
@@ -165,7 +165,7 @@ class ProjectAPIEndpoint(BaseAPIView):
             role=20,
         )
         # Also create the issue property for the user
-        _ = IssueProperty.objects.create(
+        _ = IssueUserProperty.objects.create(
             project_id=serializer.data["id"],
             user=request.user,
         )
@@ -179,7 +179,7 @@ class ProjectAPIEndpoint(BaseAPIView):
             role=20,
         )
         # Also create the issue property for the user
-        IssueProperty.objects.create(
+        IssueUserProperty.objects.create(
             project_id=serializer.data["id"],
             user_id=serializer.data["project_lead"],
         )
@@ -240,6 +240,7 @@ class ProjectAPIEndpoint(BaseAPIView):
             .filter(pk=serializer.data["id"])
             .first()
         )
+
         # Model activity
         model_activity.delay(
             model_name="project",

@@ -50,7 +50,7 @@ from .issue import (
     IssueCreateSerializer,
     IssueActivitySerializer,
     IssueCommentSerializer,
-    IssuePropertySerializer,
+    IssueUserPropertySerializer,
     IssueAssigneeSerializer,
     LabelSerializer,
     IssueSerializer,
@@ -91,6 +91,7 @@ from .page import (
     PageLogSerializer,
     SubPageSerializer,
     PageDetailSerializer,
+    PageVersionSerializer,
 )

 from .estimate import (
@@ -120,3 +121,5 @@ from .exporter import ExporterHistorySerializer
 from .webhook import WebhookSerializer, WebhookLogSerializer

 from .dashboard import DashboardSerializer, WidgetSerializer
+
+from .favorite import UserFavoriteSerializer
apiserver/plane/app/serializers/favorite.py (new file, 101 lines)
@@ -0,0 +1,101 @@
from rest_framework import serializers

from plane.db.models import (
    UserFavorite,
    Cycle,
    Module,
    Issue,
    IssueView,
    Page,
    Project,
)


class ProjectFavoriteLiteSerializer(serializers.ModelSerializer):

    class Meta:
        model = Project
        fields = ["id", "name", "logo_props"]


class PageFavoriteLiteSerializer(serializers.ModelSerializer):
    project_id = serializers.SerializerMethodField()

    class Meta:
        model = Page
        fields = ["id", "name", "logo_props", "project_id"]

    def get_project_id(self, obj):
        project = (
            obj.projects.first()
        )  # This gets the first project related to the Page
        return project.id if project else None


class CycleFavoriteLiteSerializer(serializers.ModelSerializer):

    class Meta:
        model = Cycle
        fields = ["id", "name", "logo_props", "project_id"]


class ModuleFavoriteLiteSerializer(serializers.ModelSerializer):

    class Meta:
        model = Module
        fields = ["id", "name", "logo_props", "project_id"]


class ViewFavoriteSerializer(serializers.ModelSerializer):

    class Meta:
        model = IssueView
        fields = ["id", "name", "logo_props", "project_id"]


def get_entity_model_and_serializer(entity_type):
    entity_map = {
        "cycle": (Cycle, CycleFavoriteLiteSerializer),
        "issue": (Issue, None),
        "module": (Module, ModuleFavoriteLiteSerializer),
        "view": (IssueView, ViewFavoriteSerializer),
        "page": (Page, PageFavoriteLiteSerializer),
        "project": (Project, ProjectFavoriteLiteSerializer),
        "folder": (None, None),
    }
    return entity_map.get(entity_type, (None, None))


class UserFavoriteSerializer(serializers.ModelSerializer):
    entity_data = serializers.SerializerMethodField()

    class Meta:
        model = UserFavorite
        fields = [
            "id",
            "entity_type",
            "entity_identifier",
            "entity_data",
            "name",
            "is_folder",
            "sequence",
            "parent",
            "workspace_id",
            "project_id",
        ]
        read_only_fields = ["workspace", "created_by", "updated_by"]

    def get_entity_data(self, obj):
        entity_type = obj.entity_type
        entity_identifier = obj.entity_identifier

        entity_model, entity_serializer = get_entity_model_and_serializer(
            entity_type
        )
        if entity_model and entity_serializer:
            try:
                entity = entity_model.objects.get(pk=entity_identifier)
                return entity_serializer(entity).data
            except entity_model.DoesNotExist:
                return None
        return None
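get_entity_model_and_serializer is a dispatch table with a (None, None) default, which is what lets get_entity_data degrade gracefully instead of raising on unknown or serializer-less entity types. A toy standalone version of the same pattern, with string placeholders instead of the Django models:

entity_map = {
    "cycle": ("Cycle", "CycleFavoriteLiteSerializer"),
    "issue": ("Issue", None),     # model known, but no lite serializer
    "folder": (None, None),       # pure grouping node, nothing to resolve
}

def get_entity(entity_type):
    # Unknown types fall back to (None, None) rather than raising,
    # so callers can simply return None for entity_data.
    return entity_map.get(entity_type, (None, None))

assert get_entity("cycle") == ("Cycle", "CycleFavoriteLiteSerializer")
assert get_entity("unknown") == (None, None)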
@@ -17,7 +17,7 @@ from plane.db.models import (
     Issue,
     IssueActivity,
     IssueComment,
-    IssueProperty,
+    IssueUserProperty,
     IssueAssignee,
     IssueSubscriber,
     IssueLabel,
@@ -135,7 +135,11 @@ class IssueCreateSerializer(BaseSerializer):
         workspace_id = self.context["workspace_id"]
         default_assignee_id = self.context["default_assignee_id"]

-        issue = Issue.objects.create(**validated_data, project_id=project_id)
+        # Create Issue
+        issue = Issue.objects.create(
+            **validated_data,
+            project_id=project_id,
+        )

         # Issue Audit Users
         created_by_id = issue.created_by_id
@@ -248,9 +252,9 @@ class IssueActivitySerializer(BaseSerializer):
         fields = "__all__"


-class IssuePropertySerializer(BaseSerializer):
+class IssueUserPropertySerializer(BaseSerializer):
     class Meta:
-        model = IssueProperty
+        model = IssueUserProperty
         fields = "__all__"
         read_only_fields = [
             "user",
@@ -459,10 +463,14 @@ class IssueLinkSerializer(BaseSerializer):
         return IssueLink.objects.create(**validated_data)

     def update(self, instance, validated_data):
-        if IssueLink.objects.filter(
-            url=validated_data.get("url"),
-            issue_id=instance.issue_id,
-        ).exclude(pk=instance.id).exists():
+        if (
+            IssueLink.objects.filter(
+                url=validated_data.get("url"),
+                issue_id=instance.issue_id,
+            )
+            .exclude(pk=instance.id)
+            .exists()
+        ):
             raise serializers.ValidationError(
                 {"error": "URL already exists for this Issue"}
             )
@@ -509,7 +517,7 @@ class IssueAttachmentLiteSerializer(DynamicBaseSerializer):
             "attributes",
             "issue_id",
             "updated_at",
-            "updated_by_id",
+            "updated_by",
         ]
         read_only_fields = fields

@@ -525,6 +533,7 @@ class IssueReactionSerializer(BaseSerializer):
             "project",
             "issue",
             "actor",
+            "deleted_at"
         ]

@@ -543,7 +552,7 @@ class CommentReactionSerializer(BaseSerializer):
     class Meta:
         model = CommentReaction
         fields = "__all__"
-        read_only_fields = ["workspace", "project", "comment", "actor"]
+        read_only_fields = ["workspace", "project", "comment", "actor", "deleted_at"]


 class IssueVoteSerializer(BaseSerializer):
@@ -39,6 +39,7 @@ class ModuleWriteSerializer(BaseSerializer):
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"archived_at",
|
||||
"deleted_at",
|
||||
]
|
||||
|
||||
def to_representation(self, instance):
|
||||
@@ -177,8 +178,8 @@ class ModuleSerializer(DynamicBaseSerializer):
|
||||
started_issues = serializers.IntegerField(read_only=True)
|
||||
unstarted_issues = serializers.IntegerField(read_only=True)
|
||||
backlog_issues = serializers.IntegerField(read_only=True)
|
||||
total_estimate_points = serializers.IntegerField(read_only=True)
|
||||
completed_estimate_points = serializers.IntegerField(read_only=True)
|
||||
total_estimate_points = serializers.FloatField(read_only=True)
|
||||
completed_estimate_points = serializers.FloatField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Module
|
||||
@@ -222,10 +223,10 @@ class ModuleSerializer(DynamicBaseSerializer):
|
||||
class ModuleDetailSerializer(ModuleSerializer):
|
||||
link_module = ModuleLinkSerializer(read_only=True, many=True)
|
||||
sub_issues = serializers.IntegerField(read_only=True)
|
||||
backlog_estimate_points = serializers.IntegerField(read_only=True)
|
||||
unstarted_estimate_points = serializers.IntegerField(read_only=True)
|
||||
started_estimate_points = serializers.IntegerField(read_only=True)
|
||||
cancelled_estimate_points = serializers.IntegerField(read_only=True)
|
||||
backlog_estimate_points = serializers.FloatField(read_only=True)
|
||||
unstarted_estimate_points = serializers.FloatField(read_only=True)
|
||||
started_estimate_points = serializers.FloatField(read_only=True)
|
||||
cancelled_estimate_points = serializers.FloatField(read_only=True)
|
||||
|
||||
class Meta(ModuleSerializer.Meta):
|
||||
fields = ModuleSerializer.Meta.fields + ["link_module", "sub_issues", "backlog_estimate_points", "unstarted_estimate_points", "started_estimate_points", "cancelled_estimate_points"]
|
||||
|
||||
@@ -3,11 +3,16 @@ from .base import BaseSerializer
|
||||
from .user import UserLiteSerializer
|
||||
from plane.db.models import Notification, UserNotificationPreference
|
||||
|
||||
# Third Party imports
|
||||
from rest_framework import serializers
|
||||
|
||||
|
||||
class NotificationSerializer(BaseSerializer):
|
||||
triggered_by_details = UserLiteSerializer(
|
||||
read_only=True, source="triggered_by"
|
||||
)
|
||||
is_inbox_issue = serializers.BooleanField(read_only=True)
|
||||
is_mentioned_notification = serializers.BooleanField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Notification
|
||||
|
||||
@@ -10,6 +10,7 @@ from plane.db.models import (
|
||||
Label,
|
||||
ProjectPage,
|
||||
Project,
|
||||
PageVersion,
|
||||
)
|
||||
|
||||
|
||||
@@ -161,3 +162,13 @@ class PageLogSerializer(BaseSerializer):
|
||||
"workspace",
|
||||
"page",
|
||||
]
|
||||
|
||||
|
||||
class PageVersionSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = PageVersion
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"page",
|
||||
]
|
||||
|
||||
@@ -28,6 +28,7 @@ class ProjectSerializer(BaseSerializer):
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"deleted_at",
|
||||
]
|
||||
|
||||
def create(self, validated_data):
|
||||
|
||||
@@ -23,7 +23,7 @@ class IssueViewSerializer(DynamicBaseSerializer):
|
||||
]
|
||||
|
||||
def create(self, validated_data):
|
||||
query_params = validated_data.get("query_data", {})
|
||||
query_params = validated_data.get("filters", {})
|
||||
if bool(query_params):
|
||||
validated_data["query"] = issue_filters(query_params, "POST")
|
||||
else:
|
||||
@@ -31,7 +31,7 @@ class IssueViewSerializer(DynamicBaseSerializer):
|
||||
return IssueView.objects.create(**validated_data)
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
query_params = validated_data.get("query_data", {})
|
||||
query_params = validated_data.get("filters", {})
|
||||
if bool(query_params):
|
||||
validated_data["query"] = issue_filters(query_params, "POST")
|
||||
else:
|
||||
|
||||
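Note: both create and update on the view serializer now read the "filters" key instead of "query_data", so older clients still sending "query_data" would silently fall through to the empty default. A hedged, runnable sketch of the new behavior (issue_filters below is a stand-in stub for plane's filter compiler; the payload is made up):

def issue_filters(params, method):  # stand-in for plane's filter compiler
    return {"filters": params, "method": method}

validated_data = {"name": "Urgent issues", "filters": {"priority": ["urgent"]}}
query_params = validated_data.get("filters", {})
if query_params:
    # Mirrors the serializer: only a non-empty "filters" dict produces a query.
    validated_data["query"] = issue_filters(query_params, "POST")
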
@@ -15,6 +15,7 @@ from plane.db.models import (
    WorkspaceTheme,
    WorkspaceUserProperties,
)
from plane.utils.constants import RESTRICTED_WORKSPACE_SLUGS


class WorkSpaceSerializer(DynamicBaseSerializer):
@@ -22,22 +23,11 @@ class WorkSpaceSerializer(DynamicBaseSerializer):
    total_members = serializers.IntegerField(read_only=True)
    total_issues = serializers.IntegerField(read_only=True)

    def validated(self, data):
        if data.get("slug") in [
            "404",
            "accounts",
            "api",
            "create-workspace",
            "god-mode",
            "installations",
            "invitations",
            "onboarding",
            "profile",
            "spaces",
            "workspace-invitations",
            "password",
        ]:
            raise serializers.ValidationError({"slug": "Slug is not valid"})
    def validate_slug(self, value):
        # Check if the slug is restricted
        if value in RESTRICTED_WORKSPACE_SLUGS:
            raise serializers.ValidationError("Slug is not valid")
        return value

    class Meta:
        model = Workspace
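
Note: the removed method was named "validated", a hook DRF never calls, so the inline slug blocklist was dead code; validate_slug is the validate_<field> hook DRF actually invokes during is_valid(), and the blocklist now lives in one constant. A runnable sketch of the pattern (the constant's contents here are an illustrative subset, not plane's full list):

from rest_framework import serializers

RESTRICTED_WORKSPACE_SLUGS = ["api", "god-mode", "onboarding"]  # illustrative subset

class WorkspaceSlugSerializer(serializers.Serializer):
    slug = serializers.CharField()

    def validate_slug(self, value):
        # DRF calls validate_<field_name> automatically during is_valid().
        if value in RESTRICTED_WORKSPACE_SLUGS:
            raise serializers.ValidationError("Slug is not valid")
        return value
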
@@ -233,13 +233,13 @@ urlpatterns = [
        name="project-issue-comment-reactions",
    ),
    ## End Comment Reactions
    ## IssueProperty
    ## IssueUserProperty
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-properties/",
        IssueUserDisplayPropertyEndpoint.as_view(),
        name="project-issue-display-properties",
    ),
    ## IssueProperty End
    ## IssueUserProperty End
    ## Issue Archives
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/",

@@ -7,6 +7,7 @@ from plane.app.views import (
    PageLogEndpoint,
    SubPagesEndpoint,
    PagesDescriptionViewSet,
    PageVersionEndpoint,
)


@@ -65,6 +66,16 @@ urlpatterns = [
        ),
        name="project-pages-lock-unlock",
    ),
    # private and public page
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/access/",
        PageViewSet.as_view(
            {
                "post": "access",
            }
        ),
        name="project-pages-access",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/transactions/",
        PageLogEndpoint.as_view(),
@@ -90,4 +101,14 @@ urlpatterns = [
        ),
        name="page-description",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/versions/",
        PageVersionEndpoint.as_view(),
        name="page-versions",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/versions/<uuid:pk>/",
        PageVersionEndpoint.as_view(),
        name="page-versions",
    ),
]

@@ -25,6 +25,8 @@ from plane.app.views import (
    ExportWorkspaceUserActivityEndpoint,
    WorkspaceModulesEndpoint,
    WorkspaceCyclesEndpoint,
    WorkspaceFavoriteEndpoint,
    WorkspaceFavoriteGroupEndpoint,
)


@@ -237,4 +239,19 @@ urlpatterns = [
        WorkspaceCyclesEndpoint.as_view(),
        name="workspace-cycles",
    ),
    path(
        "workspaces/<str:slug>/user-favorites/",
        WorkspaceFavoriteEndpoint.as_view(),
        name="workspace-user-favorites",
    ),
    path(
        "workspaces/<str:slug>/user-favorites/<uuid:favorite_id>/",
        WorkspaceFavoriteEndpoint.as_view(),
        name="workspace-user-favorites",
    ),
    path(
        "workspaces/<str:slug>/user-favorites/<uuid:favorite_id>/group/",
        WorkspaceFavoriteGroupEndpoint.as_view(),
        name="workspace-user-favorites-groups",
    ),
]

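Note: the new favorites routes point the collection URL and the detail URL at the same endpoint class, so the view presumably branches on whether favorite_id was captured. A hedged sketch of exercising them with Django's test client (the URL prefix, workspace slug, and UUID are made-up assumptions):

from django.test import Client

client = Client()
# Collection route: list or create favorites for a workspace.
client.get("/api/workspaces/my-workspace/user-favorites/")
# Detail route: update or delete one favorite (UUID is illustrative).
client.delete(
    "/api/workspaces/my-workspace/user-favorites/11111111-1111-1111-1111-111111111111/"
)
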
@@ -40,6 +40,11 @@ from .workspace.base import (
    ExportWorkspaceUserActivityEndpoint,
)

from .workspace.favorite import (
    WorkspaceFavoriteEndpoint,
    WorkspaceFavoriteGroupEndpoint,
)

from .workspace.member import (
    WorkSpaceMemberViewSet,
    TeamMemberViewSet,
@@ -179,6 +184,7 @@ from .page.base import (
    SubPagesEndpoint,
    PagesDescriptionViewSet,
)
from .page.version import PageVersionEndpoint

from .search.base import GlobalSearchEndpoint
from .search.issue import IssueSearchEndpoint

@@ -14,21 +14,18 @@ from django.db.models import (
    UUIDField,
    Value,
    When,
    Subquery,
    Sum,
    FloatField,
)
from django.db.models.functions import Coalesce
from django.db.models.functions import Coalesce, Cast
from django.utils import timezone

# Third party imports
from rest_framework import status
from rest_framework.response import Response
from plane.app.permissions import ProjectEntityPermission
from plane.db.models import (
    Cycle,
    UserFavorite,
    Issue,
    Label,
    User,
)
from plane.db.models import Cycle, UserFavorite, Issue, Label, User, Project
from plane.utils.analytics_plot import burndown_plot

# Module imports
@@ -49,6 +46,89 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
            project_id=self.kwargs.get("project_id"),
            workspace__slug=self.kwargs.get("slug"),
        )
        backlog_estimate_point = (
            Issue.issue_objects.filter(
                estimate_point__estimate__type="points",
                state__group="backlog",
                issue_cycle__cycle_id=OuterRef("pk"),
            )
            .values("issue_cycle__cycle_id")
            .annotate(
                backlog_estimate_point=Sum(
                    Cast("estimate_point__value", FloatField())
                )
            )
            .values("backlog_estimate_point")[:1]
        )
        unstarted_estimate_point = (
            Issue.issue_objects.filter(
                estimate_point__estimate__type="points",
                state__group="unstarted",
                issue_cycle__cycle_id=OuterRef("pk"),
            )
            .values("issue_cycle__cycle_id")
            .annotate(
                unstarted_estimate_point=Sum(
                    Cast("estimate_point__value", FloatField())
                )
            )
            .values("unstarted_estimate_point")[:1]
        )
        started_estimate_point = (
            Issue.issue_objects.filter(
                estimate_point__estimate__type="points",
                state__group="started",
                issue_cycle__cycle_id=OuterRef("pk"),
            )
            .values("issue_cycle__cycle_id")
            .annotate(
                started_estimate_point=Sum(
                    Cast("estimate_point__value", FloatField())
                )
            )
            .values("started_estimate_point")[:1]
        )
        cancelled_estimate_point = (
            Issue.issue_objects.filter(
                estimate_point__estimate__type="points",
                state__group="cancelled",
                issue_cycle__cycle_id=OuterRef("pk"),
            )
            .values("issue_cycle__cycle_id")
            .annotate(
                cancelled_estimate_point=Sum(
                    Cast("estimate_point__value", FloatField())
                )
            )
            .values("cancelled_estimate_point")[:1]
        )
        completed_estimate_point = (
            Issue.issue_objects.filter(
                estimate_point__estimate__type="points",
                state__group="completed",
                issue_cycle__cycle_id=OuterRef("pk"),
            )
            .values("issue_cycle__cycle_id")
            .annotate(
                completed_estimate_points=Sum(
                    Cast("estimate_point__value", FloatField())
                )
            )
            .values("completed_estimate_points")[:1]
        )
        total_estimate_point = (
            Issue.issue_objects.filter(
                estimate_point__estimate__type="points",
                issue_cycle__cycle_id=OuterRef("pk"),
            )
            .values("issue_cycle__cycle_id")
            .annotate(
                total_estimate_points=Sum(
                    Cast("estimate_point__value", FloatField())
                )
            )
            .values("total_estimate_points")[:1]
        )
        return (
            Cycle.objects.filter(workspace__slug=self.kwargs.get("slug"))
            .filter(project_id=self.kwargs.get("project_id"))
@@ -172,25 +252,50 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
                    Value([], output_field=ArrayField(UUIDField())),
                )
            )
            .annotate(
                backlog_estimate_points=Coalesce(
                    Subquery(backlog_estimate_point),
                    Value(0, output_field=FloatField()),
                ),
            )
            .annotate(
                unstarted_estimate_points=Coalesce(
                    Subquery(unstarted_estimate_point),
                    Value(0, output_field=FloatField()),
                ),
            )
            .annotate(
                started_estimate_points=Coalesce(
                    Subquery(started_estimate_point),
                    Value(0, output_field=FloatField()),
                ),
            )
            .annotate(
                cancelled_estimate_points=Coalesce(
                    Subquery(cancelled_estimate_point),
                    Value(0, output_field=FloatField()),
                ),
            )
            .annotate(
                completed_estimate_points=Coalesce(
                    Subquery(completed_estimate_point),
                    Value(0, output_field=FloatField()),
                ),
            )
            .annotate(
                total_estimate_points=Coalesce(
                    Subquery(total_estimate_point),
                    Value(0, output_field=FloatField()),
                ),
            )
            .order_by("-is_favorite", "name")
            .distinct()
        )

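Note: every per-state estimate total above follows one shape: filter issues belonging to the outer cycle, group on the cycle id, Sum a Cast of the estimate value, and slice to a single scalar so it can feed Subquery. A standalone hedged sketch of that shape (Cycle and Issue here are simplified stand-ins for plane's models):

from django.db.models import FloatField, OuterRef, Subquery, Sum, Value
from django.db.models.functions import Cast, Coalesce

completed_points = (
    Issue.objects.filter(
        state__group="completed",
        issue_cycle__cycle_id=OuterRef("pk"),
    )
    .values("issue_cycle__cycle_id")  # group rows per cycle
    .annotate(total=Sum(Cast("estimate_point__value", FloatField())))
    .values("total")[:1]  # scalar subquery: one row, one column
)

cycles = Cycle.objects.annotate(
    completed_estimate_points=Coalesce(
        Subquery(completed_points), Value(0, output_field=FloatField())
    )  # Coalesce turns "no matching issues" (NULL) into 0
)
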
    def get(self, request, slug, project_id, pk=None):
        plot_type = request.GET.get("plot_type", "issues")
        if pk is None:
            queryset = (
                self.get_queryset()
                .annotate(
                    total_issues=Count(
                        "issue_cycle",
                        filter=Q(
                            issue_cycle__issue__archived_at__isnull=True,
                            issue_cycle__issue__is_draft=False,
                        ),
                    )
                )
                .values(
            queryset = self.get_queryset().values(
                # necessary fields
                "id",
                "workspace_id",
@@ -256,7 +361,10 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
                "external_id",
                "progress_snapshot",
                "sub_issues",
                "logo_props",
                # meta fields
                "completed_estimate_points",
                "total_estimate_points",
                "is_favorite",
                "total_issues",
                "cancelled_issues",
@@ -266,20 +374,117 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
                "backlog_issues",
                "assignee_ids",
                "status",
                "created_by",
                "archived_at",
                )
                .first()
            )
            queryset = queryset.first()

            if data is None:
                return Response(
                    {"error": "Cycle does not exist"},
                    status=status.HTTP_400_BAD_REQUEST,
            estimate_type = Project.objects.filter(
                workspace__slug=slug,
                pk=project_id,
                estimate__isnull=False,
                estimate__type="points",
            ).exists()

            data["estimate_distribution"] = {}
            if estimate_type:
                assignee_distribution = (
                    Issue.issue_objects.filter(
                        issue_cycle__cycle_id=pk,
                        workspace__slug=slug,
                        project_id=project_id,
                    )
                    .annotate(display_name=F("assignees__display_name"))
                    .annotate(assignee_id=F("assignees__id"))
                    .annotate(avatar=F("assignees__avatar"))
                    .values("display_name", "assignee_id", "avatar")
                    .annotate(
                        total_estimates=Sum(
                            Cast("estimate_point__value", FloatField())
                        )
                    )
                    .annotate(
                        completed_estimates=Sum(
                            Cast("estimate_point__value", FloatField()),
                            filter=Q(
                                completed_at__isnull=False,
                                archived_at__isnull=True,
                                is_draft=False,
                            ),
                        )
                    )
                    .annotate(
                        pending_estimates=Sum(
                            Cast("estimate_point__value", FloatField()),
                            filter=Q(
                                completed_at__isnull=True,
                                archived_at__isnull=True,
                                is_draft=False,
                            ),
                        )
                    )
                    .order_by("display_name")
                )

                label_distribution = (
                    Issue.issue_objects.filter(
                        issue_cycle__cycle_id=pk,
                        workspace__slug=slug,
                        project_id=project_id,
                    )
                    .annotate(label_name=F("labels__name"))
                    .annotate(color=F("labels__color"))
                    .annotate(label_id=F("labels__id"))
                    .values("label_name", "color", "label_id")
                    .annotate(
                        total_estimates=Sum(
                            Cast("estimate_point__value", FloatField())
                        )
                    )
                    .annotate(
                        completed_estimates=Sum(
                            Cast("estimate_point__value", FloatField()),
                            filter=Q(
                                completed_at__isnull=False,
                                archived_at__isnull=True,
                                is_draft=False,
                            ),
                        )
                    )
                    .annotate(
                        pending_estimates=Sum(
                            Cast("estimate_point__value", FloatField()),
                            filter=Q(
                                completed_at__isnull=True,
                                archived_at__isnull=True,
                                is_draft=False,
                            ),
                        )
                    )
                    .order_by("label_name")
                )
                data["estimate_distribution"] = {
                    "assignees": assignee_distribution,
                    "labels": label_distribution,
                    "completion_chart": {},
                }

                if data["start_date"] and data["end_date"]:
                    data["estimate_distribution"]["completion_chart"] = (
                        burndown_plot(
                            queryset=queryset,
                            slug=slug,
                            project_id=project_id,
                            plot_type="points",
                            cycle_id=pk,
                        )
                    )

            # Assignee Distribution
            assignee_distribution = (
                Issue.objects.filter(
                Issue.issue_objects.filter(
                    issue_cycle__cycle_id=pk,
                    workspace__slug=slug,
                    project_id=project_id,
@@ -299,7 +504,10 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
                .annotate(
                    total_issues=Count(
                        "id",
                        filter=Q(archived_at__isnull=True, is_draft=False),
                        filter=Q(
                            archived_at__isnull=True,
                            is_draft=False,
                        ),
                    ),
                )
                .annotate(
@@ -327,7 +535,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):

            # Label Distribution
            label_distribution = (
                Issue.objects.filter(
                Issue.issue_objects.filter(
                    issue_cycle__cycle_id=pk,
                    workspace__slug=slug,
                    project_id=project_id,
@@ -339,7 +547,10 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
                .annotate(
                    total_issues=Count(
                        "id",
                        filter=Q(archived_at__isnull=True, is_draft=False),
                        filter=Q(
                            archived_at__isnull=True,
                            is_draft=False,
                        ),
                    ),
                )
                .annotate(
@@ -376,7 +587,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
                    queryset=queryset,
                    slug=slug,
                    project_id=project_id,
                    plot_type=plot_type,
                    plot_type="issues",
                    cycle_id=pk,
                )


@@ -19,7 +19,7 @@ from django.db.models import (
    When,
    Subquery,
    Sum,
    IntegerField,
    FloatField,
)
from django.db.models.functions import Coalesce, Cast
from django.utils import timezone
@@ -47,6 +47,7 @@ from plane.db.models import (
    Label,
    User,
    Project,
    ProjectMember,
)
from plane.utils.analytics_plot import burndown_plot

@@ -86,7 +87,7 @@ class CycleViewSet(BaseViewSet):
            .values("issue_cycle__cycle_id")
            .annotate(
                backlog_estimate_point=Sum(
                    Cast("estimate_point__value", IntegerField())
                    Cast("estimate_point__value", FloatField())
                )
            )
            .values("backlog_estimate_point")[:1]
@@ -100,7 +101,7 @@ class CycleViewSet(BaseViewSet):
            .values("issue_cycle__cycle_id")
            .annotate(
                unstarted_estimate_point=Sum(
                    Cast("estimate_point__value", IntegerField())
                    Cast("estimate_point__value", FloatField())
                )
            )
            .values("unstarted_estimate_point")[:1]
@@ -114,7 +115,7 @@ class CycleViewSet(BaseViewSet):
            .values("issue_cycle__cycle_id")
            .annotate(
                started_estimate_point=Sum(
                    Cast("estimate_point__value", IntegerField())
                    Cast("estimate_point__value", FloatField())
                )
            )
            .values("started_estimate_point")[:1]
@@ -128,7 +129,7 @@ class CycleViewSet(BaseViewSet):
            .values("issue_cycle__cycle_id")
            .annotate(
                cancelled_estimate_point=Sum(
                    Cast("estimate_point__value", IntegerField())
                    Cast("estimate_point__value", FloatField())
                )
            )
            .values("cancelled_estimate_point")[:1]
@@ -142,7 +143,7 @@ class CycleViewSet(BaseViewSet):
            .values("issue_cycle__cycle_id")
            .annotate(
                completed_estimate_points=Sum(
                    Cast("estimate_point__value", IntegerField())
                    Cast("estimate_point__value", FloatField())
                )
            )
            .values("completed_estimate_points")[:1]
@@ -155,7 +156,7 @@ class CycleViewSet(BaseViewSet):
            .values("issue_cycle__cycle_id")
            .annotate(
                total_estimate_points=Sum(
                    Cast("estimate_point__value", IntegerField())
                    Cast("estimate_point__value", FloatField())
                )
            )
            .values("total_estimate_points")[:1]
@@ -287,37 +288,37 @@ class CycleViewSet(BaseViewSet):
            .annotate(
                backlog_estimate_points=Coalesce(
                    Subquery(backlog_estimate_point),
                    Value(0, output_field=IntegerField()),
                    Value(0, output_field=FloatField()),
                ),
            )
            .annotate(
                unstarted_estimate_points=Coalesce(
                    Subquery(unstarted_estimate_point),
                    Value(0, output_field=IntegerField()),
                    Value(0, output_field=FloatField()),
                ),
            )
            .annotate(
                started_estimate_points=Coalesce(
                    Subquery(started_estimate_point),
                    Value(0, output_field=IntegerField()),
                    Value(0, output_field=FloatField()),
                ),
            )
            .annotate(
                cancelled_estimate_points=Coalesce(
                    Subquery(cancelled_estimate_point),
                    Value(0, output_field=IntegerField()),
                    Value(0, output_field=FloatField()),
                ),
            )
            .annotate(
                completed_estimate_points=Coalesce(
                    Subquery(completed_estimate_point),
                    Value(0, output_field=IntegerField()),
                    Value(0, output_field=FloatField()),
                ),
            )
            .annotate(
                total_estimate_points=Coalesce(
                    Subquery(total_estimate_point),
                    Value(0, output_field=IntegerField()),
                    Value(0, output_field=FloatField()),
                ),
            )
            .order_by("-is_favorite", "name")
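Note: the repeated IntegerField-to-FloatField swap is the substance of this file: estimate point values are stored as text and may be fractional (for example "0.5"), and casting them to an integer either truncates or errors depending on the database, while the Coalesce default's output type must also match the Subquery's. A hedged sketch of the corrected aggregation (Issue is a stand-in for plane's model):

from django.db.models import FloatField, Sum
from django.db.models.functions import Cast

# Fractional estimate values survive the cast only as floats.
totals = Issue.objects.aggregate(
    total_points=Sum(Cast("estimate_point__value", FloatField()))
)
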
@@ -384,7 +385,7 @@ class CycleViewSet(BaseViewSet):
        data[0]["estimate_distribution"] = {}
        if estimate_type:
            assignee_distribution = (
                Issue.objects.filter(
                Issue.issue_objects.filter(
                    issue_cycle__cycle_id=data[0]["id"],
                    workspace__slug=slug,
                    project_id=project_id,
@@ -395,12 +396,12 @@ class CycleViewSet(BaseViewSet):
                .values("display_name", "assignee_id", "avatar")
                .annotate(
                    total_estimates=Sum(
                        Cast("estimate_point__value", IntegerField())
                        Cast("estimate_point__value", FloatField())
                    )
                )
                .annotate(
                    completed_estimates=Sum(
                        Cast("estimate_point__value", IntegerField()),
                        Cast("estimate_point__value", FloatField()),
                        filter=Q(
                            completed_at__isnull=False,
                            archived_at__isnull=True,
@@ -410,7 +411,7 @@ class CycleViewSet(BaseViewSet):
                )
                .annotate(
                    pending_estimates=Sum(
                        Cast("estimate_point__value", IntegerField()),
                        Cast("estimate_point__value", FloatField()),
                        filter=Q(
                            completed_at__isnull=True,
                            archived_at__isnull=True,
@@ -422,7 +423,7 @@ class CycleViewSet(BaseViewSet):
                )

            label_distribution = (
                Issue.objects.filter(
                Issue.issue_objects.filter(
                    issue_cycle__cycle_id=data[0]["id"],
                    workspace__slug=slug,
                    project_id=project_id,
@@ -433,12 +434,12 @@ class CycleViewSet(BaseViewSet):
                .values("label_name", "color", "label_id")
                .annotate(
                    total_estimates=Sum(
                        Cast("estimate_point__value", IntegerField())
                        Cast("estimate_point__value", FloatField())
                    )
                )
                .annotate(
                    completed_estimates=Sum(
                        Cast("estimate_point__value", IntegerField()),
                        Cast("estimate_point__value", FloatField()),
                        filter=Q(
                            completed_at__isnull=False,
                            archived_at__isnull=True,
@@ -448,7 +449,7 @@ class CycleViewSet(BaseViewSet):
                )
                .annotate(
                    pending_estimates=Sum(
                        Cast("estimate_point__value", IntegerField()),
                        Cast("estimate_point__value", FloatField()),
                        filter=Q(
                            completed_at__isnull=True,
                            archived_at__isnull=True,
@@ -465,18 +466,18 @@ class CycleViewSet(BaseViewSet):
            }

            if data[0]["start_date"] and data[0]["end_date"]:
                data[0]["estimate_distribution"]["completion_chart"] = (
                    burndown_plot(
                        queryset=queryset.first(),
                        slug=slug,
                        project_id=project_id,
                        plot_type="points",
                        cycle_id=data[0]["id"],
                    )
                data[0]["estimate_distribution"][
                    "completion_chart"
                ] = burndown_plot(
                    queryset=queryset.first(),
                    slug=slug,
                    project_id=project_id,
                    plot_type="points",
                    cycle_id=data[0]["id"],
                )

        assignee_distribution = (
            Issue.objects.filter(
            Issue.issue_objects.filter(
                issue_cycle__cycle_id=data[0]["id"],
                workspace__slug=slug,
                project_id=project_id,
@@ -518,7 +519,7 @@ class CycleViewSet(BaseViewSet):
            )

        label_distribution = (
            Issue.objects.filter(
            Issue.issue_objects.filter(
                issue_cycle__cycle_id=data[0]["id"],
                workspace__slug=slug,
                project_id=project_id,
@@ -645,6 +646,8 @@ class CycleViewSet(BaseViewSet):
            "progress_snapshot",
            "logo_props",
            # meta fields
            "completed_estimate_points",
            "total_estimate_points",
            "is_favorite",
            "cancelled_issues",
            "total_issues",
@@ -654,6 +657,7 @@ class CycleViewSet(BaseViewSet):
            "backlog_issues",
            "assignee_ids",
            "status",
            "created_by",
        )
        .first()
    )
@@ -739,6 +743,8 @@ class CycleViewSet(BaseViewSet):
            "progress_snapshot",
            "logo_props",
            # meta fields
            "completed_estimate_points",
            "total_estimate_points",
            "is_favorite",
            "total_issues",
            "cancelled_issues",
@@ -748,6 +754,7 @@ class CycleViewSet(BaseViewSet):
            "backlog_issues",
            "assignee_ids",
            "status",
            "created_by",
        ).first()

        # Send the model activity
@@ -800,6 +807,8 @@ class CycleViewSet(BaseViewSet):
            "sub_issues",
            "logo_props",
            # meta fields
            "completed_estimate_points",
            "total_estimate_points",
            "is_favorite",
            "total_issues",
            "cancelled_issues",
@@ -825,7 +834,7 @@ class CycleViewSet(BaseViewSet):
        data["estimate_distribution"] = {}
        if estimate_type:
            assignee_distribution = (
                Issue.objects.filter(
                Issue.issue_objects.filter(
                    issue_cycle__cycle_id=pk,
                    workspace__slug=slug,
                    project_id=project_id,
@@ -836,12 +845,12 @@ class CycleViewSet(BaseViewSet):
                .values("display_name", "assignee_id", "avatar")
                .annotate(
                    total_estimates=Sum(
                        Cast("estimate_point__value", IntegerField())
                        Cast("estimate_point__value", FloatField())
                    )
                )
                .annotate(
                    completed_estimates=Sum(
                        Cast("estimate_point__value", IntegerField()),
                        Cast("estimate_point__value", FloatField()),
                        filter=Q(
                            completed_at__isnull=False,
                            archived_at__isnull=True,
@@ -851,7 +860,7 @@ class CycleViewSet(BaseViewSet):
                )
                .annotate(
                    pending_estimates=Sum(
                        Cast("estimate_point__value", IntegerField()),
                        Cast("estimate_point__value", FloatField()),
                        filter=Q(
                            completed_at__isnull=True,
                            archived_at__isnull=True,
@@ -863,7 +872,7 @@ class CycleViewSet(BaseViewSet):
                )

            label_distribution = (
                Issue.objects.filter(
                Issue.issue_objects.filter(
                    issue_cycle__cycle_id=pk,
                    workspace__slug=slug,
                    project_id=project_id,
@@ -874,12 +883,12 @@ class CycleViewSet(BaseViewSet):
                .values("label_name", "color", "label_id")
                .annotate(
                    total_estimates=Sum(
                        Cast("estimate_point__value", IntegerField())
                        Cast("estimate_point__value", FloatField())
                    )
                )
                .annotate(
                    completed_estimates=Sum(
                        Cast("estimate_point__value", IntegerField()),
                        Cast("estimate_point__value", FloatField()),
                        filter=Q(
                            completed_at__isnull=False,
                            archived_at__isnull=True,
@@ -889,7 +898,7 @@ class CycleViewSet(BaseViewSet):
                )
                .annotate(
                    pending_estimates=Sum(
                        Cast("estimate_point__value", IntegerField()),
                        Cast("estimate_point__value", FloatField()),
                        filter=Q(
                            completed_at__isnull=True,
                            archived_at__isnull=True,
@@ -918,7 +927,7 @@ class CycleViewSet(BaseViewSet):

        # Assignee Distribution
        assignee_distribution = (
            Issue.objects.filter(
            Issue.issue_objects.filter(
                issue_cycle__cycle_id=pk,
                workspace__slug=slug,
                project_id=project_id,
@@ -969,7 +978,7 @@ class CycleViewSet(BaseViewSet):

        # Label Distribution
        label_distribution = (
            Issue.objects.filter(
            Issue.issue_objects.filter(
                issue_cycle__cycle_id=pk,
                workspace__slug=slug,
                project_id=project_id,
@@ -1031,14 +1040,28 @@ class CycleViewSet(BaseViewSet):
        )

    def destroy(self, request, slug, project_id, pk):
        cycle = Cycle.objects.get(
            workspace__slug=slug, project_id=project_id, pk=pk
        )
        if cycle.owned_by_id != request.user.id and not (
            ProjectMember.objects.filter(
                workspace__slug=slug,
                member=request.user,
                role=20,
                project_id=project_id,
                is_active=True,
            ).exists()
        ):
            return Response(
                {"error": "Only admin or owner can delete the cycle"},
                status=status.HTTP_403_FORBIDDEN,
            )

        cycle_issues = list(
            CycleIssue.objects.filter(
                cycle_id=self.kwargs.get("pk")
            ).values_list("issue", flat=True)
        )
        cycle = Cycle.objects.get(
            workspace__slug=slug, project_id=project_id, pk=pk
        )

        issue_activity.delay(
            type="cycle.activity.deleted",
@@ -1059,6 +1082,10 @@ class CycleViewSet(BaseViewSet):
        )
        # Delete the cycle
        cycle.delete()
        # Delete the cycle issues
        CycleIssue.objects.filter(
            cycle_id=self.kwargs.get("pk"),
        ).delete()
        return Response(status=status.HTTP_204_NO_CONTENT)


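Note: destroy now checks authorization before touching anything: the caller must own the cycle or hold an active admin membership (role 20) in the project, and the cycle's issue ids are captured for the activity log before the delete. A hedged sketch of the guard on its own (ProjectMember usage mirrors the diff; the helper name is made up):

def can_delete_cycle(cycle, user, slug, project_id):
    # Owner may always delete; otherwise require an active admin (role 20).
    if cycle.owned_by_id == user.id:
        return True
    return ProjectMember.objects.filter(
        workspace__slug=slug,
        project_id=project_id,
        member=user,
        role=20,
        is_active=True,
    ).exists()
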
@@ -1127,7 +1154,7 @@ class CycleFavoriteViewSet(BaseViewSet):
            workspace__slug=slug,
            entity_identifier=cycle_id,
        )
        cycle_favorite.delete()
        cycle_favorite.delete(soft=False)
        return Response(status=status.HTTP_204_NO_CONTENT)


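Note: delete(soft=False) suggests plane's base model soft-deletes by default (stamping deleted_at), and favorites opt out so that re-favoriting the same cycle does not collide with a tombstoned row. A hedged sketch of what such a mixin might look like; this is an assumption about the underlying behavior, not plane's actual implementation:

from django.db import models
from django.utils import timezone

class SoftDeleteModel(models.Model):
    deleted_at = models.DateTimeField(null=True, blank=True)

    class Meta:
        abstract = True

    def delete(self, *args, soft=True, **kwargs):
        # soft=True stamps deleted_at; soft=False removes the row outright.
        if soft:
            self.deleted_at = timezone.now()
            self.save(update_fields=["deleted_at"])
        else:
            super().delete(*args, **kwargs)
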
@@ -1223,7 +1250,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):

        if estimate_type:
            assignee_estimate_data = (
                Issue.objects.filter(
                Issue.issue_objects.filter(
                    issue_cycle__cycle_id=cycle_id,
                    workspace__slug=slug,
                    project_id=project_id,
@@ -1234,12 +1261,12 @@ class TransferCycleIssueEndpoint(BaseAPIView):
                .values("display_name", "assignee_id", "avatar")
                .annotate(
                    total_estimates=Sum(
                        Cast("estimate_point__value", IntegerField())
                        Cast("estimate_point__value", FloatField())
                    )
                )
                .annotate(
                    completed_estimates=Sum(
                        Cast("estimate_point__value", IntegerField()),
                        Cast("estimate_point__value", FloatField()),
                        filter=Q(
                            completed_at__isnull=False,
                            archived_at__isnull=True,
@@ -1249,7 +1276,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
                )
                .annotate(
                    pending_estimates=Sum(
                        Cast("estimate_point__value", IntegerField()),
                        Cast("estimate_point__value", FloatField()),
                        filter=Q(
                            completed_at__isnull=True,
                            archived_at__isnull=True,
@@ -1277,7 +1304,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
            ]

            label_distribution_data = (
                Issue.objects.filter(
                Issue.issue_objects.filter(
                    issue_cycle__cycle_id=cycle_id,
                    workspace__slug=slug,
                    project_id=project_id,
@@ -1288,12 +1315,12 @@ class TransferCycleIssueEndpoint(BaseAPIView):
                .values("label_name", "color", "label_id")
                .annotate(
                    total_estimates=Sum(
                        Cast("estimate_point__value", IntegerField())
                        Cast("estimate_point__value", FloatField())
                    )
                )
                .annotate(
                    completed_estimates=Sum(
                        Cast("estimate_point__value", IntegerField()),
                        Cast("estimate_point__value", FloatField()),
                        filter=Q(
                            completed_at__isnull=False,
                            archived_at__isnull=True,
@@ -1303,7 +1330,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
                )
                .annotate(
                    pending_estimates=Sum(
                        Cast("estimate_point__value", IntegerField()),
                        Cast("estimate_point__value", FloatField()),
                        filter=Q(
                            completed_at__isnull=True,
                            archived_at__isnull=True,
@@ -1338,7 +1365,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):

        # Get the assignee distribution
        assignee_distribution = (
            Issue.objects.filter(
            Issue.issue_objects.filter(
                issue_cycle__cycle_id=cycle_id,
                workspace__slug=slug,
                project_id=project_id,
@@ -1395,7 +1422,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):

        # Get the label distribution
        label_distribution = (
            Issue.objects.filter(
            Issue.issue_objects.filter(
                issue_cycle__cycle_id=cycle_id,
                workspace__slug=slug,
                project_id=project_id,
@@ -1507,14 +1534,40 @@ class TransferCycleIssueEndpoint(BaseAPIView):
        )

        updated_cycles = []
        update_cycle_issue_activity = []
        for cycle_issue in cycle_issues:
            cycle_issue.cycle_id = new_cycle_id
            updated_cycles.append(cycle_issue)
            update_cycle_issue_activity.append(
                {
                    "old_cycle_id": str(cycle_id),
                    "new_cycle_id": str(new_cycle_id),
                    "issue_id": str(cycle_issue.issue_id),
                }
            )

        cycle_issues = CycleIssue.objects.bulk_update(
            updated_cycles, ["cycle_id"], batch_size=100
        )

        # Capture Issue Activity
        issue_activity.delay(
            type="cycle.activity.created",
            requested_data=json.dumps({"cycles_list": []}),
            actor_id=str(self.request.user.id),
            issue_id=None,
            project_id=str(self.kwargs.get("project_id", None)),
            current_instance=json.dumps(
                {
                    "updated_cycle_issues": update_cycle_issue_activity,
                    "created_cycle_issues": "[]",
                }
            ),
            epoch=int(timezone.now().timestamp()),
            notification=True,
            origin=request.META.get("HTTP_ORIGIN"),
        )

        return Response({"message": "Success"}, status=status.HTTP_200_OK)



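Note: bulk_update does not return the updated objects (it returns the number of matched rows on Django 4.0+ and None before that), so rebinding cycle_issues to its result discards the queryset; the code works only because that name is not reused afterwards. A hedged sketch of the transfer step in isolation (old_cycle_id and new_cycle_id are illustrative):

updated = []
for cycle_issue in CycleIssue.objects.filter(cycle_id=old_cycle_id):
    cycle_issue.cycle_id = new_cycle_id
    updated.append(cycle_issue)

rows_changed = CycleIssue.objects.bulk_update(
    updated, ["cycle_id"], batch_size=100
)  # an int on Django 4.0+, not the updated objects themselves
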
@@ -20,6 +20,7 @@ from rest_framework.response import Response
from plane.app.permissions import (
    ProjectEntityPermission,
)

# Module imports
from .. import BaseViewSet
from plane.app.serializers import (
@@ -45,7 +46,6 @@ from plane.utils.paginator import (
    SubGroupedOffsetPaginator,
)

# Module imports

class CycleIssueViewSet(BaseViewSet):
    serializer_class = CycleIssueSerializer
@@ -334,7 +334,7 @@ class CycleIssueViewSet(BaseViewSet):
        return Response({"message": "success"}, status=status.HTTP_201_CREATED)

    def destroy(self, request, slug, project_id, cycle_id, issue_id):
        cycle_issue = CycleIssue.objects.get(
        cycle_issue = CycleIssue.objects.filter(
            issue_id=issue_id,
            workspace__slug=slug,
            project_id=project_id,

@@ -542,6 +542,7 @@ def dashboard_recent_collaborators(self, request, slug):
            project__project_projectmember__member=request.user,
            project__project_projectmember__is_active=True,
            project__archived_at__isnull=True,
            is_active=True,
        )
        .annotate(
            num_activities=Coalesce(

@@ -41,6 +41,7 @@ class ExportIssuesEndpoint(BaseAPIView):
            project=project_ids,
            initiated_by=request.user,
            provider=provider,
            type="issue_exports",
        )

        issue_export_task.delay(
@@ -65,7 +66,8 @@ class ExportIssuesEndpoint(BaseAPIView):

    def get(self, request, slug):
        exporter_history = ExporterHistory.objects.filter(
            workspace__slug=slug
            workspace__slug=slug,
            type="issue_exports",
        ).select_related("workspace", "initiated_by")

        if request.GET.get("per_page", False) and request.GET.get(

@@ -553,28 +553,27 @@ class InboxIssueViewSet(BaseViewSet):
            project_id=project_id,
            inbox_id=inbox_id,
        )
        # Get the project member
        project_member = ProjectMember.objects.get(
            workspace__slug=slug,
            project_id=project_id,
            member=request.user,
            is_active=True,
        )

        if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(
            request.user.id
        ):
            return Response(
                {"error": "You cannot delete inbox issue"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Check the issue status
        if inbox_issue.status in [-2, -1, 0, 2]:
            # Delete the issue also
            Issue.objects.filter(
            issue = Issue.objects.filter(
                workspace__slug=slug, project_id=project_id, pk=issue_id
            ).delete()
            ).first()
            if issue.created_by_id != request.user.id and (
                not ProjectMember.objects.filter(
                    workspace__slug=slug,
                    member=request.user,
                    role=20,
                    project_id=project_id,
                    is_active=True,
                ).exists()
            ):
                return Response(
                    {"error": "Only admin or creator can delete the issue"},
                    status=status.HTTP_403_FORBIDDEN,
                )
            issue.delete()

        inbox_issue.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)

@@ -66,6 +66,7 @@ class IssueArchiveViewSet(BaseViewSet):
            .annotate(count=Func(F("id"), function="Count"))
            .values("count")
        )
        .filter(deleted_at__isnull=True)
        .filter(archived_at__isnull=False)
        .filter(project_id=self.kwargs.get("project_id"))
        .filter(workspace__slug=self.kwargs.get("slug"))

@@ -14,7 +14,7 @@ from rest_framework.parsers import MultiPartParser, FormParser
from .. import BaseAPIView
from plane.app.serializers import IssueAttachmentSerializer
from plane.app.permissions import ProjectEntityPermission
from plane.db.models import IssueAttachment
from plane.db.models import IssueAttachment, ProjectMember
from plane.bgtasks.issue_activites_task import issue_activity


@@ -49,6 +49,19 @@ class IssueAttachmentEndpoint(BaseAPIView):

    def delete(self, request, slug, project_id, issue_id, pk):
        issue_attachment = IssueAttachment.objects.get(pk=pk)
        if issue_attachment.created_by_id != request.user.id and (
            not ProjectMember.objects.filter(
                workspace__slug=slug,
                member=request.user,
                role=20,
                project_id=project_id,
                is_active=True,
            ).exists()
        ):
            return Response(
                {"error": "Only admin or creator can delete the attachment"},
                status=status.HTTP_403_FORBIDDEN,
            )
        issue_attachment.asset.delete(save=False)
        issue_attachment.delete()
        issue_activity.delay(

@@ -32,7 +32,7 @@ from plane.app.permissions import (
from plane.app.serializers import (
    IssueCreateSerializer,
    IssueDetailSerializer,
    IssuePropertySerializer,
    IssueUserPropertySerializer,
    IssueSerializer,
)
from plane.bgtasks.issue_activites_task import issue_activity
@@ -40,10 +40,11 @@ from plane.db.models import (
    Issue,
    IssueAttachment,
    IssueLink,
    IssueProperty,
    IssueUserProperty,
    IssueReaction,
    IssueSubscriber,
    Project,
    ProjectMember,
)
from plane.utils.grouper import (
    issue_group_values,
@@ -165,6 +166,7 @@ class IssueListEndpoint(BaseAPIView):
            "link_count",
            "is_draft",
            "archived_at",
            "deleted_at",
        )
        datetime_fields = ["created_at", "updated_at"]
        issues = user_timezone_converter(
@@ -399,6 +401,7 @@ class IssueViewSet(BaseViewSet):
            "link_count",
            "is_draft",
            "archived_at",
            "deleted_at",
        )
        .first()
    )
@@ -481,7 +484,38 @@ class IssueViewSet(BaseViewSet):
        return Response(serializer.data, status=status.HTTP_200_OK)

    def partial_update(self, request, slug, project_id, pk=None):
        issue = self.get_queryset().filter(pk=pk).first()
        issue = (
            self.get_queryset()
            .annotate(
                label_ids=Coalesce(
                    ArrayAgg(
                        "labels__id",
                        distinct=True,
                        filter=~Q(labels__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                assignee_ids=Coalesce(
                    ArrayAgg(
                        "assignees__id",
                        distinct=True,
                        filter=~Q(assignees__id__isnull=True)
                        & Q(assignees__member_project__is_active=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                module_ids=Coalesce(
                    ArrayAgg(
                        "issue_module__module_id",
                        distinct=True,
                        filter=~Q(issue_module__module_id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            )
            .filter(pk=pk)
            .first()
        )

        if not issue:
            return Response(
@@ -518,6 +552,20 @@ class IssueViewSet(BaseViewSet):
        issue = Issue.objects.get(
            workspace__slug=slug, project_id=project_id, pk=pk
        )
        if issue.created_by_id != request.user.id and (
            not ProjectMember.objects.filter(
                workspace__slug=slug,
                member=request.user,
                role=20,
                project_id=project_id,
                is_active=True,
            ).exists()
        ):
            return Response(
                {"error": "Only admin or creator can delete the issue"},
                status=status.HTTP_403_FORBIDDEN,
            )

        issue.delete()
        issue_activity.delay(
            type="issue.activity.deleted",
@@ -539,7 +587,7 @@ class IssueUserDisplayPropertyEndpoint(BaseAPIView):
    ]

    def patch(self, request, slug, project_id):
        issue_property = IssueProperty.objects.get(
        issue_property = IssueUserProperty.objects.get(
            user=request.user,
            project_id=project_id,
        )
@@ -554,14 +602,14 @@ class IssueUserDisplayPropertyEndpoint(BaseAPIView):
            "display_properties", issue_property.display_properties
        )
        issue_property.save()
        serializer = IssuePropertySerializer(issue_property)
        serializer = IssueUserPropertySerializer(issue_property)
        return Response(serializer.data, status=status.HTTP_201_CREATED)

    def get(self, request, slug, project_id):
        issue_property, _ = IssueProperty.objects.get_or_create(
        issue_property, _ = IssueUserProperty.objects.get_or_create(
            user=request.user, project_id=project_id
        )
        serializer = IssuePropertySerializer(issue_property)
        serializer = IssueUserPropertySerializer(issue_property)
        return Response(serializer.data, status=status.HTTP_200_OK)


@@ -571,6 +619,19 @@ class BulkDeleteIssuesEndpoint(BaseAPIView):
    ]

    def delete(self, request, slug, project_id):
        if ProjectMember.objects.filter(
            workspace__slug=slug,
            member=request.user,
            role__in=[15, 10, 5],
            project_id=project_id,
            is_active=True,
        ).exists():

            return Response(
                {"error": "Only admin can perform this action"},
                status=status.HTTP_403_FORBIDDEN,
            )

        issue_ids = request.data.get("issue_ids", [])

        if not len(issue_ids):

@@ -40,6 +40,7 @@ from plane.db.models import (
    IssueReaction,
    IssueSubscriber,
    Project,
    ProjectMember,
)
from plane.utils.grouper import (
    issue_group_values,
@@ -67,6 +68,7 @@ class IssueDraftViewSet(BaseViewSet):
        Issue.objects.filter(project_id=self.kwargs.get("project_id"))
        .filter(workspace__slug=self.kwargs.get("slug"))
        .filter(is_draft=True)
        .filter(deleted_at__isnull=True)
        .select_related("workspace", "project", "state", "parent")
        .prefetch_related("assignees", "labels", "issue_module__module")
        .annotate(cycle_id=F("issue_cycle__cycle_id"))
@@ -380,6 +382,19 @@ class IssueDraftViewSet(BaseViewSet):
        issue = Issue.objects.get(
            workspace__slug=slug, project_id=project_id, pk=pk
        )
        if issue.created_by_id != request.user.id and (
            not ProjectMember.objects.filter(
                workspace__slug=slug,
                member=request.user,
                role=20,
                project_id=project_id,
                is_active=True,
            ).exists()
        ):
            return Response(
                {"error": "Only admin or creator can delete the issue"},
                status=status.HTTP_403_FORBIDDEN,
            )
        issue.delete()
        issue_activity.delay(
            type="issue_draft.activity.deleted",

@@ -3,8 +3,11 @@ import json

# Django imports
from django.utils import timezone
from django.db.models import Q
from django.db.models import Q, OuterRef, F, Func, UUIDField, Value, CharField
from django.core.serializers.json import DjangoJSONEncoder
from django.db.models.functions import Coalesce
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField

# Third Party imports
from rest_framework.response import Response
@@ -20,6 +23,9 @@ from plane.app.permissions import ProjectEntityPermission
from plane.db.models import (
    Project,
    IssueRelation,
    Issue,
    IssueAttachment,
    IssueLink,
)
from plane.bgtasks.issue_activites_task import issue_activity

@@ -31,24 +37,6 @@ class IssueRelationViewSet(BaseViewSet):
        ProjectEntityPermission,
    ]

    def get_queryset(self):
        return self.filter_queryset(
            super()
            .get_queryset()
            .filter(workspace__slug=self.kwargs.get("slug"))
            .filter(project_id=self.kwargs.get("project_id"))
            .filter(issue_id=self.kwargs.get("issue_id"))
            .filter(
                project__project_projectmember__member=self.request.user,
                project__project_projectmember__is_active=True,
                project__archived_at__isnull=True,
            )
            .select_related("project")
            .select_related("workspace")
            .select_related("issue")
            .distinct()
        )

    def list(self, request, slug, project_id, issue_id):
        issue_relations = (
            IssueRelation.objects.filter(
@@ -61,56 +49,146 @@ class IssueRelationViewSet(BaseViewSet):
            .order_by("-created_at")
            .distinct()
        )

        # get all blocking issues
        blocking_issues = issue_relations.filter(
            relation_type="blocked_by", related_issue_id=issue_id
        )
        ).values_list("issue_id", flat=True)

        # get all blocked by issues
        blocked_by_issues = issue_relations.filter(
            relation_type="blocked_by", issue_id=issue_id
        )
        ).values_list("related_issue_id", flat=True)

        # get all duplicate issues
        duplicate_issues = issue_relations.filter(
            issue_id=issue_id, relation_type="duplicate"
        )
        ).values_list("related_issue_id", flat=True)

        # get all duplicate issues (reverse relation)
        duplicate_issues_related = issue_relations.filter(
            related_issue_id=issue_id, relation_type="duplicate"
        )
        ).values_list("issue_id", flat=True)

        # get all relates to issues
        relates_to_issues = issue_relations.filter(
            issue_id=issue_id, relation_type="relates_to"
        )
        ).values_list("related_issue_id", flat=True)

        # get all relates to issues (reverse relation)
        relates_to_issues_related = issue_relations.filter(
            related_issue_id=issue_id, relation_type="relates_to"
        )
        ).values_list("issue_id", flat=True)

        blocked_by_issues_serialized = IssueRelationSerializer(
            blocked_by_issues, many=True
        ).data
        duplicate_issues_serialized = IssueRelationSerializer(
            duplicate_issues, many=True
        ).data
        relates_to_issues_serialized = IssueRelationSerializer(
            relates_to_issues, many=True
        ).data
        queryset = (
            Issue.issue_objects
            .filter(workspace__slug=slug)
            .select_related("workspace", "project", "state", "parent")
            .prefetch_related("assignees", "labels", "issue_module__module")
            .annotate(cycle_id=F("issue_cycle__cycle_id"))
            .annotate(
                link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                attachment_count=IssueAttachment.objects.filter(
                    issue=OuterRef("id")
                )
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                sub_issues_count=Issue.issue_objects.filter(
                    parent=OuterRef("id")
                )
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                label_ids=Coalesce(
                    ArrayAgg(
                        "labels__id",
                        distinct=True,
                        filter=~Q(labels__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                assignee_ids=Coalesce(
                    ArrayAgg(
                        "assignees__id",
                        distinct=True,
                        filter=~Q(assignees__id__isnull=True)
                        & Q(assignees__member_project__is_active=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            )
        ).distinct()

        # reverse relation for blocked by issues
        blocking_issues_serialized = RelatedIssueSerializer(
            blocking_issues, many=True
        ).data
        # reverse relation for duplicate issues
        duplicate_issues_related_serialized = RelatedIssueSerializer(
            duplicate_issues_related, many=True
        ).data
        # reverse relation for related issues
        relates_to_issues_related_serialized = RelatedIssueSerializer(
            relates_to_issues_related, many=True
        ).data
        # Fields
        fields = [
            "id",
            "name",
            "state_id",
            "sort_order",
            "priority",
            "sequence_id",
            "project_id",
            "label_ids",
            "assignee_ids",
            "created_at",
            "updated_at",
            "created_by",
            "updated_by",
            "relation_type",
        ]

        response_data = {
            "blocking": blocking_issues_serialized,
            "blocked_by": blocked_by_issues_serialized,
            "duplicate": duplicate_issues_serialized
            + duplicate_issues_related_serialized,
            "relates_to": relates_to_issues_serialized
            + relates_to_issues_related_serialized,
            "blocking": queryset.filter(pk__in=blocking_issues)
            .annotate(
                relation_type=Value("blocking", output_field=CharField())
            )
            .values(*fields),
            "blocked_by": queryset.filter(pk__in=blocked_by_issues)
            .annotate(
                relation_type=Value("blocked_by", output_field=CharField())
            )
            .values(*fields),
            "duplicate": queryset.filter(pk__in=duplicate_issues)
            .annotate(
                relation_type=Value(
                    "duplicate",
                    output_field=CharField(),
                )
            )
            .values(*fields)
            | queryset.filter(pk__in=duplicate_issues_related)
            .annotate(
                relation_type=Value(
                    "duplicate",
                    output_field=CharField(),
                )
            )
            .values(*fields),
            "relates_to": queryset.filter(pk__in=relates_to_issues)
            .annotate(
                relation_type=Value(
                    "relates_to",
                    output_field=CharField(),
                )
            )
            .values(*fields)
            | queryset.filter(pk__in=relates_to_issues_related)
            .annotate(
                relation_type=Value(
                    "relates_to",
                    output_field=CharField(),
                )
            )
            .values(*fields),
        }

        return Response(response_data, status=status.HTTP_200_OK)

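Note: the reworked list builds every relation bucket from one annotated issue queryset: each bucket filters by the collected ids, tags its rows with a literal relation_type, and the two directions of symmetric relations are merged with the queryset OR operator on .values() rows. A hedged sketch of that tag-and-union pattern (queryset and the id lists are stand-ins for the ones built above):

from django.db.models import CharField, Value

fields = ["id", "name", "relation_type"]

duplicate_bucket = (
    queryset.filter(pk__in=duplicate_ids)
    .annotate(relation_type=Value("duplicate", output_field=CharField()))
    .values(*fields)
    | queryset.filter(pk__in=reverse_duplicate_ids)
    .annotate(relation_type=Value("duplicate", output_field=CharField()))
    .values(*fields)
)
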
@@ -99,6 +99,7 @@ class SubIssuesEndpoint(BaseAPIView):
|
||||
),
|
||||
)
|
||||
.annotate(state_group=F("state__group"))
|
||||
.order_by("-created_at")
|
||||
)
|
||||
|
||||
# create's a dict with state group name with their respective issue id's
|
||||
|
||||
@@ -12,7 +12,8 @@ from django.db.models import (
|
||||
Subquery,
|
||||
UUIDField,
|
||||
Value,
|
||||
Sum
|
||||
Sum,
|
||||
FloatField,
|
||||
)
|
||||
from django.db.models.functions import Coalesce, Cast
|
||||
from django.utils import timezone
|
||||
@@ -44,8 +45,8 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
|
||||
def get_queryset(self):
|
||||
favorite_subquery = UserFavorite.objects.filter(
|
||||
user=self.request.user,
|
||||
entity_identifier=OuterRef("pk"),
|
||||
entity_type="module",
|
||||
entity_identifier=OuterRef("pk"),
|
||||
project_id=self.kwargs.get("project_id"),
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
)
|
||||
@@ -102,8 +103,93 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
             .annotate(cnt=Count("pk"))
             .values("cnt")
         )
+        completed_estimate_point = (
+            Issue.issue_objects.filter(
+                estimate_point__estimate__type="points",
+                state__group="completed",
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(
+                completed_estimate_points=Sum(
+                    Cast("estimate_point__value", FloatField())
+                )
+            )
+            .values("completed_estimate_points")[:1]
+        )
+
+        total_estimate_point = (
+            Issue.issue_objects.filter(
+                estimate_point__estimate__type="points",
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(
+                total_estimate_points=Sum(
+                    Cast("estimate_point__value", FloatField())
+                )
+            )
+            .values("total_estimate_points")[:1]
+        )
+        backlog_estimate_point = (
+            Issue.issue_objects.filter(
+                estimate_point__estimate__type="points",
+                state__group="backlog",
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(
+                backlog_estimate_point=Sum(
+                    Cast("estimate_point__value", FloatField())
+                )
+            )
+            .values("backlog_estimate_point")[:1]
+        )
+        unstarted_estimate_point = (
+            Issue.issue_objects.filter(
+                estimate_point__estimate__type="points",
+                state__group="unstarted",
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(
+                unstarted_estimate_point=Sum(
+                    Cast("estimate_point__value", FloatField())
+                )
+            )
+            .values("unstarted_estimate_point")[:1]
+        )
+        started_estimate_point = (
+            Issue.issue_objects.filter(
+                estimate_point__estimate__type="points",
+                state__group="started",
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(
+                started_estimate_point=Sum(
+                    Cast("estimate_point__value", FloatField())
+                )
+            )
+            .values("started_estimate_point")[:1]
+        )
+        cancelled_estimate_point = (
+            Issue.issue_objects.filter(
+                estimate_point__estimate__type="points",
+                state__group="cancelled",
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(
+                cancelled_estimate_point=Sum(
+                    Cast("estimate_point__value", FloatField())
+                )
+            )
+            .values("cancelled_estimate_point")[:1]
+        )
         return (
             Module.objects.filter(workspace__slug=self.kwargs.get("slug"))
             .filter(project_id=self.kwargs.get("project_id"))
             .filter(archived_at__isnull=False)
             .annotate(is_favorite=Exists(favorite_subquery))
             .select_related("workspace", "project", "lead")
@@ -152,6 +238,42 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
                     Value(0, output_field=IntegerField()),
                 )
             )
+            .annotate(
+                backlog_estimate_points=Coalesce(
+                    Subquery(backlog_estimate_point),
+                    Value(0, output_field=FloatField()),
+                ),
+            )
+            .annotate(
+                unstarted_estimate_points=Coalesce(
+                    Subquery(unstarted_estimate_point),
+                    Value(0, output_field=FloatField()),
+                ),
+            )
+            .annotate(
+                started_estimate_points=Coalesce(
+                    Subquery(started_estimate_point),
+                    Value(0, output_field=FloatField()),
+                ),
+            )
+            .annotate(
+                cancelled_estimate_points=Coalesce(
+                    Subquery(cancelled_estimate_point),
+                    Value(0, output_field=FloatField()),
+                ),
+            )
+            .annotate(
+                completed_estimate_points=Coalesce(
+                    Subquery(completed_estimate_point),
+                    Value(0, output_field=FloatField()),
+                ),
+            )
+            .annotate(
+                total_estimate_points=Coalesce(
+                    Subquery(total_estimate_point),
+                    Value(0, output_field=FloatField()),
+                ),
+            )
             .annotate(
                 member_ids=Coalesce(
                     ArrayAgg(
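Why the cast type matters across these hunks: estimate point values are stored as text, and an estimate of type "points" can legitimately hold a fractional value such as "0.5". Casting that text to an integer either truncates the fraction or raises, depending on the database backend, so both the Sum(Cast(...)) aggregates and the Coalesce defaults move to FloatField. A minimal illustration, with Issue standing in for any model reached through the estimate_point relation:

    from django.db.models import FloatField, Sum, Value
    from django.db.models.functions import Cast, Coalesce

    # Aggregate the string-valued estimate column as floats so "0.5"
    # contributes 0.5 instead of being truncated or rejected.
    totals = Issue.objects.aggregate(
        total_points=Coalesce(
            Sum(Cast("estimate_point__value", FloatField())),
            Value(0, output_field=FloatField()),
        )
    )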
@@ -232,8 +354,8 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
         data["estimate_distribution"] = {}
 
         if estimate_type:
-            label_distribution = (
-                Issue.objects.filter(
+            assignee_distribution = (
+                Issue.issue_objects.filter(
                     issue_module__module_id=pk,
                     workspace__slug=slug,
                     project_id=project_id,
@@ -252,12 +374,12 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
                 )
                 .annotate(
                     total_estimates=Sum(
-                        Cast("estimate_point__value", IntegerField())
+                        Cast("estimate_point__value", FloatField())
                     ),
                 )
                 .annotate(
                     completed_estimates=Sum(
-                        Cast("estimate_point__value", IntegerField()),
+                        Cast("estimate_point__value", FloatField()),
                         filter=Q(
                             completed_at__isnull=False,
                             archived_at__isnull=True,
@@ -267,7 +389,7 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
                 )
                 .annotate(
                     pending_estimates=Sum(
-                        Cast("estimate_point__value", IntegerField()),
+                        Cast("estimate_point__value", FloatField()),
                         filter=Q(
                             completed_at__isnull=True,
                             archived_at__isnull=True,
@@ -278,8 +400,8 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
                 .order_by("first_name", "last_name")
             )
 
-            assignee_distribution = (
-                Issue.objects.filter(
+            label_distribution = (
+                Issue.issue_objects.filter(
                     issue_module__module_id=pk,
                     workspace__slug=slug,
                     project_id=project_id,
@@ -290,12 +412,12 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
                 .values("label_name", "color", "label_id")
                 .annotate(
                     total_estimates=Sum(
-                        Cast("estimate_point__value", IntegerField())
+                        Cast("estimate_point__value", FloatField())
                     ),
                 )
                 .annotate(
                     completed_estimates=Sum(
-                        Cast("estimate_point__value", IntegerField()),
+                        Cast("estimate_point__value", FloatField()),
                         filter=Q(
                             completed_at__isnull=False,
                             archived_at__isnull=True,
@@ -305,7 +427,7 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
                 )
                 .annotate(
                     pending_estimates=Sum(
-                        Cast("estimate_point__value", IntegerField()),
+                        Cast("estimate_point__value", FloatField()),
                         filter=Q(
                             completed_at__isnull=True,
                             archived_at__isnull=True,
@@ -315,8 +437,8 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
                 )
                 .order_by("label_name")
             )
-            data["estimate_distribution"]["assignee"] = assignee_distribution
-            data["estimate_distribution"]["label"] = label_distribution
+            data["estimate_distribution"]["assignees"] = assignee_distribution
+            data["estimate_distribution"]["labels"] = label_distribution
 
             if modules and modules.start_date and modules.target_date:
                 data["estimate_distribution"]["completion_chart"] = (
@@ -328,8 +450,9 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
                     module_id=pk,
                 )
             )
 
         assignee_distribution = (
-            Issue.objects.filter(
+            Issue.issue_objects.filter(
                 issue_module__module_id=pk,
                 workspace__slug=slug,
                 project_id=project_id,
@@ -353,7 +476,7 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
                         archived_at__isnull=True,
                         is_draft=False,
                     ),
-                )
+                ),
             )
             .annotate(
                 completed_issues=Count(
@@ -379,7 +502,7 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
             )
 
         label_distribution = (
-            Issue.objects.filter(
+            Issue.issue_objects.filter(
                 issue_module__module_id=pk,
                 workspace__slug=slug,
                 project_id=project_id,
@@ -425,8 +548,6 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
                 "labels": label_distribution,
                 "completion_chart": {},
             }
 
-        # Fetch the modules
         if modules and modules.start_date and modules.target_date:
             data["distribution"]["completion_chart"] = burndown_plot(
                 queryset=modules,

@@ -17,6 +17,7 @@ from django.db.models import (
     UUIDField,
     Value,
     Sum,
+    FloatField,
 )
 from django.db.models.functions import Coalesce, Cast
 from django.core.serializers.json import DjangoJSONEncoder
@@ -47,6 +48,7 @@ from plane.db.models import (
     ModuleLink,
     ModuleUserProperties,
     Project,
+    ProjectMember,
 )
 from plane.utils.analytics_plot import burndown_plot
 from plane.utils.user_timezone_converter import user_timezone_converter
@@ -138,7 +140,7 @@ class ModuleViewSet(BaseViewSet):
             .values("issue_module__module_id")
             .annotate(
                 completed_estimate_points=Sum(
-                    Cast("estimate_point__value", IntegerField())
+                    Cast("estimate_point__value", FloatField())
                 )
             )
             .values("completed_estimate_points")[:1]
@@ -152,7 +154,7 @@ class ModuleViewSet(BaseViewSet):
             .values("issue_module__module_id")
             .annotate(
                 total_estimate_points=Sum(
-                    Cast("estimate_point__value", IntegerField())
+                    Cast("estimate_point__value", FloatField())
                 )
             )
             .values("total_estimate_points")[:1]
@@ -166,7 +168,7 @@ class ModuleViewSet(BaseViewSet):
             .values("issue_module__module_id")
             .annotate(
                 backlog_estimate_point=Sum(
-                    Cast("estimate_point__value", IntegerField())
+                    Cast("estimate_point__value", FloatField())
                 )
             )
             .values("backlog_estimate_point")[:1]
@@ -180,7 +182,7 @@ class ModuleViewSet(BaseViewSet):
             .values("issue_module__module_id")
             .annotate(
                 unstarted_estimate_point=Sum(
-                    Cast("estimate_point__value", IntegerField())
+                    Cast("estimate_point__value", FloatField())
                 )
             )
             .values("unstarted_estimate_point")[:1]
@@ -194,7 +196,7 @@ class ModuleViewSet(BaseViewSet):
             .values("issue_module__module_id")
             .annotate(
                 started_estimate_point=Sum(
-                    Cast("estimate_point__value", IntegerField())
+                    Cast("estimate_point__value", FloatField())
                 )
             )
             .values("started_estimate_point")[:1]
@@ -208,7 +210,7 @@ class ModuleViewSet(BaseViewSet):
             .values("issue_module__module_id")
             .annotate(
                 cancelled_estimate_point=Sum(
-                    Cast("estimate_point__value", IntegerField())
+                    Cast("estimate_point__value", FloatField())
                 )
             )
             .values("cancelled_estimate_point")[:1]
@@ -270,37 +272,37 @@ class ModuleViewSet(BaseViewSet):
             .annotate(
                 backlog_estimate_points=Coalesce(
                     Subquery(backlog_estimate_point),
-                    Value(0, output_field=IntegerField()),
+                    Value(0, output_field=FloatField()),
                 ),
             )
             .annotate(
                 unstarted_estimate_points=Coalesce(
                     Subquery(unstarted_estimate_point),
-                    Value(0, output_field=IntegerField()),
+                    Value(0, output_field=FloatField()),
                 ),
             )
             .annotate(
                 started_estimate_points=Coalesce(
                     Subquery(started_estimate_point),
-                    Value(0, output_field=IntegerField()),
+                    Value(0, output_field=FloatField()),
                 ),
             )
             .annotate(
                 cancelled_estimate_points=Coalesce(
                     Subquery(cancelled_estimate_point),
-                    Value(0, output_field=IntegerField()),
+                    Value(0, output_field=FloatField()),
                 ),
             )
             .annotate(
                 completed_estimate_points=Coalesce(
                     Subquery(completed_estimate_point),
-                    Value(0, output_field=IntegerField()),
+                    Value(0, output_field=FloatField()),
                 ),
             )
             .annotate(
                 total_estimate_points=Coalesce(
                     Subquery(total_estimate_point),
-                    Value(0, output_field=IntegerField()),
+                    Value(0, output_field=FloatField()),
                 ),
             )
             .annotate(
@@ -456,7 +458,7 @@ class ModuleViewSet(BaseViewSet):
 
         if estimate_type:
             assignee_distribution = (
-                Issue.objects.filter(
+                Issue.issue_objects.filter(
                     issue_module__module_id=pk,
                     workspace__slug=slug,
                     project_id=project_id,
@@ -475,12 +477,12 @@ class ModuleViewSet(BaseViewSet):
                 )
                 .annotate(
                     total_estimates=Sum(
-                        Cast("estimate_point__value", IntegerField())
+                        Cast("estimate_point__value", FloatField())
                     ),
                 )
                 .annotate(
                     completed_estimates=Sum(
-                        Cast("estimate_point__value", IntegerField()),
+                        Cast("estimate_point__value", FloatField()),
                         filter=Q(
                             completed_at__isnull=False,
                             archived_at__isnull=True,
@@ -490,7 +492,7 @@ class ModuleViewSet(BaseViewSet):
                 )
                 .annotate(
                     pending_estimates=Sum(
-                        Cast("estimate_point__value", IntegerField()),
+                        Cast("estimate_point__value", FloatField()),
                         filter=Q(
                             completed_at__isnull=True,
                             archived_at__isnull=True,
@@ -502,7 +504,7 @@ class ModuleViewSet(BaseViewSet):
             )
 
             label_distribution = (
-                Issue.objects.filter(
+                Issue.issue_objects.filter(
                     issue_module__module_id=pk,
                     workspace__slug=slug,
                     project_id=project_id,
@@ -513,12 +515,12 @@ class ModuleViewSet(BaseViewSet):
                 .values("label_name", "color", "label_id")
                 .annotate(
                     total_estimates=Sum(
-                        Cast("estimate_point__value", IntegerField())
+                        Cast("estimate_point__value", FloatField())
                     ),
                 )
                 .annotate(
                     completed_estimates=Sum(
-                        Cast("estimate_point__value", IntegerField()),
+                        Cast("estimate_point__value", FloatField()),
                         filter=Q(
                             completed_at__isnull=False,
                             archived_at__isnull=True,
@@ -528,7 +530,7 @@ class ModuleViewSet(BaseViewSet):
                 )
                 .annotate(
                     pending_estimates=Sum(
-                        Cast("estimate_point__value", IntegerField()),
+                        Cast("estimate_point__value", FloatField()),
                         filter=Q(
                             completed_at__isnull=True,
                             archived_at__isnull=True,
@@ -553,7 +555,7 @@ class ModuleViewSet(BaseViewSet):
             )
 
         assignee_distribution = (
-            Issue.objects.filter(
+            Issue.issue_objects.filter(
                 issue_module__module_id=pk,
                 workspace__slug=slug,
                 project_id=project_id,
@@ -603,7 +605,7 @@ class ModuleViewSet(BaseViewSet):
             )
 
         label_distribution = (
-            Issue.objects.filter(
+            Issue.issue_objects.filter(
                 issue_module__module_id=pk,
                 workspace__slug=slug,
                 project_id=project_id,
@@ -736,6 +738,21 @@ class ModuleViewSet(BaseViewSet):
         module = Module.objects.get(
             workspace__slug=slug, project_id=project_id, pk=pk
         )
 
+        if module.created_by_id != request.user.id and (
+            not ProjectMember.objects.filter(
+                workspace__slug=slug,
+                member=request.user,
+                role=20,
+                project_id=project_id,
+                is_active=True,
+            ).exists()
+        ):
+            return Response(
+                {"error": "Only admin or creator can delete the module"},
+                status=status.HTTP_403_FORBIDDEN,
+            )
+
         module_issues = list(
             ModuleIssue.objects.filter(module_id=pk).values_list(
                 "issue", flat=True
@@ -756,6 +773,10 @@ class ModuleViewSet(BaseViewSet):
             for issue in module_issues
         ]
         module.delete()
+        # Delete the module issues
+        ModuleIssue.objects.filter(
+            module=pk,
+        ).delete()
         return Response(status=status.HTTP_204_NO_CONTENT)
 
 
@@ -819,7 +840,7 @@ class ModuleFavoriteViewSet(BaseViewSet):
             entity_type="module",
             entity_identifier=module_id,
         )
-        module_favorite.delete()
+        module_favorite.delete(soft=False)
         return Response(status=status.HTTP_204_NO_CONTENT)

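The delete(soft=False) calls in this and several later hunks lean on plane's soft-delete support: by default delete() only stamps a deletion timestamp, and soft=False forces a real row removal so a favorite can be re-created later without tripping a stale soft-deleted row. A hedged sketch of the pattern such a flag implies (plane's actual base model may differ):

    from django.db import models
    from django.utils import timezone

    class SoftDeletionModel(models.Model):
        deleted_at = models.DateTimeField(null=True, blank=True)

        class Meta:
            abstract = True

        def delete(self, using=None, soft=True, *args, **kwargs):
            if soft:
                # Soft delete: keep the row, stamp deleted_at.
                self.deleted_at = timezone.now()
                self.save(using=using)
            else:
                # Hard delete: remove the row for real.
                super().delete(using=using, *args, **kwargs)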
@@ -250,7 +250,6 @@ class ModuleIssueViewSet(BaseViewSet):
         removed_modules = request.data.get("removed_modules", [])
         project = Project.objects.get(pk=project_id)
 
-
         if modules:
             _ = ModuleIssue.objects.bulk_create(
                 [
@@ -284,7 +283,7 @@ class ModuleIssueViewSet(BaseViewSet):
             ]
 
             for module_id in removed_modules:
-                module_issue = ModuleIssue.objects.get(
+                module_issue = ModuleIssue.objects.filter(
                     workspace__slug=slug,
                     project_id=project_id,
                     module_id=module_id,
@@ -297,7 +296,7 @@ class ModuleIssueViewSet(BaseViewSet):
                     issue_id=str(issue_id),
                     project_id=str(project_id),
                     current_instance=json.dumps(
-                        {"module_name": module_issue.module.name}
+                        {"module_name": module_issue.first().module.name}
                     ),
                     epoch=int(timezone.now().timestamp()),
                     notification=True,
@@ -308,7 +307,7 @@ class ModuleIssueViewSet(BaseViewSet):
         return Response({"message": "success"}, status=status.HTTP_201_CREATED)
 
     def destroy(self, request, slug, project_id, module_id, issue_id):
-        module_issue = ModuleIssue.objects.get(
+        module_issue = ModuleIssue.objects.filter(
             workspace__slug=slug,
             project_id=project_id,
             module_id=module_id,
@@ -321,7 +320,7 @@ class ModuleIssueViewSet(BaseViewSet):
                     issue_id=str(issue_id),
                     project_id=str(project_id),
                     current_instance=json.dumps(
-                        {"module_name": module_issue.module.name}
+                        {"module_name": module_issue.first().module.name}
                    ),
                     epoch=int(timezone.now().timestamp()),
                     notification=True,

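The switch from .get() to .filter() above changes the failure mode: .get() raises DoesNotExist (or MultipleObjectsReturned) and would turn a missing link into a 500, while a queryset evaluates lazily and its .delete() simply removes zero or more rows. One nuance worth noting: module_issue.first() returns None when nothing matches, so the .first().module.name lookup still assumes at least one row exists. A defensive variant, as a sketch:

    module_issue = ModuleIssue.objects.filter(
        workspace__slug=slug,
        project_id=project_id,
        module_id=module_id,
        issue_id=issue_id,
    )
    first = module_issue.first()
    # Avoid AttributeError on None when the link row is already gone.
    module_name = first.module.name if first else None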
@@ -1,5 +1,5 @@
 # Django imports
-from django.db.models import Exists, OuterRef, Q
+from django.db.models import Exists, OuterRef, Q, Case, When, BooleanField
 from django.utils import timezone
 
 # Third party imports
@@ -43,13 +43,30 @@ class NotificationViewSet(BaseViewSet, BasePaginator):
         # Get query parameters
         snoozed = request.GET.get("snoozed", "false")
         archived = request.GET.get("archived", "false")
-        read = request.GET.get("read", "true")
+        read = request.GET.get("read", None)
         type = request.GET.get("type", "all")
+        mentioned = request.GET.get("mentioned", False)
+        q_filters = Q()
+
+        inbox_issue = Issue.objects.filter(
+            pk=OuterRef("entity_identifier"),
+            issue_inbox__status__in=[0, 2, -2],
+            workspace__slug=self.kwargs.get("slug"),
+        )
 
         notifications = (
             Notification.objects.filter(
                 workspace__slug=slug, receiver_id=request.user.id
             )
             .filter(entity_name="issue")
+            .annotate(is_inbox_issue=Exists(inbox_issue))
+            .annotate(
+                is_mentioned_notification=Case(
+                    When(sender__icontains="mentioned", then=True),
+                    default=False,
+                    output_field=BooleanField(),
+                )
+            )
             .select_related("workspace", "project", "triggered_by", "receiver")
             .order_by("snoozed_till", "-created_at")
         )
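The Case/When annotation above materializes "is this a mention?" as a boolean column computed in SQL, so clients can partition one notification list without a second query. Sketched in isolation:

    from django.db.models import BooleanField, Case, When

    # True when the sender string contains "mentioned", False otherwise.
    qs = Notification.objects.annotate(
        is_mentioned_notification=Case(
            When(sender__icontains="mentioned", then=True),
            default=False,
            output_field=BooleanField(),
        )
    )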
@@ -74,8 +91,19 @@ class NotificationViewSet(BaseViewSet, BasePaginator):
         if read == "false":
             notifications = notifications.filter(read_at__isnull=True)
 
+        if read == "true":
+            notifications = notifications.filter(read_at__isnull=False)
+
+        if mentioned:
+            notifications = notifications.filter(sender__icontains="mentioned")
+        else:
+            notifications = notifications.exclude(
+                sender__icontains="mentioned"
+            )
+
+        type = type.split(",")
         # Subscribed issues
-        if type == "watching":
+        if "subscribed" in type:
             issue_ids = (
                 IssueSubscriber.objects.filter(
                     workspace__slug=slug, subscriber_id=request.user.id
@@ -97,35 +125,32 @@ class NotificationViewSet(BaseViewSet, BasePaginator):
                 .filter(created=False, assigned=False)
                 .values_list("issue_id", flat=True)
             )
-            notifications = notifications.filter(
-                entity_identifier__in=issue_ids,
-            )
+            q_filters |= Q(entity_identifier__in=issue_ids)
 
         # Assigned Issues
-        if type == "assigned":
+        if "assigned" in type:
             issue_ids = IssueAssignee.objects.filter(
                 workspace__slug=slug, assignee_id=request.user.id
             ).values_list("issue_id", flat=True)
-            notifications = notifications.filter(
-                entity_identifier__in=issue_ids
-            )
+            q_filters |= Q(entity_identifier__in=issue_ids)
 
         # Created issues
-        if type == "created":
+        if "created" in type:
             if WorkspaceMember.objects.filter(
                 workspace__slug=slug,
                 member=request.user,
                 role__lt=15,
                 is_active=True,
             ).exists():
-                notifications = Notification.objects.none()
+                notifications = notifications.none()
             else:
                 issue_ids = Issue.objects.filter(
                     workspace__slug=slug, created_by=request.user
                 ).values_list("pk", flat=True)
-                notifications = notifications.filter(
-                    entity_identifier__in=issue_ids
-                )
+                q_filters |= Q(entity_identifier__in=issue_ids)
+
+        # Apply the combined Q object filters
+        notifications = notifications.filter(q_filters)
 
         # Pagination
         if request.GET.get("per_page", False) and request.GET.get(
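The move from per-type .filter() calls to one accumulated Q object changes the semantics: previously type was compared as a single exact string, so a request like "subscribed,assigned" matched no branch at all. Splitting the parameter and OR-ing Q objects makes the comma-separated types behave as a union, applied in a single filter. In miniature (the *_ids names are illustrative):

    from django.db.models import Q

    q_filters = Q()
    for ids in (subscribed_ids, assigned_ids, created_ids):
        q_filters |= Q(entity_identifier__in=ids)
    # One filter call: match any requested bucket, not all of them.
    notifications = notifications.filter(q_filters)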
@@ -200,43 +225,35 @@ class NotificationViewSet(BaseViewSet, BasePaginator):
 class UnreadNotificationEndpoint(BaseAPIView):
     def get(self, request, slug):
-        # Watching Issues Count
-        watching_issues_count = Notification.objects.filter(
-            workspace__slug=slug,
-            receiver_id=request.user.id,
-            read_at__isnull=True,
-            archived_at__isnull=True,
-            entity_identifier__in=IssueSubscriber.objects.filter(
-                workspace__slug=slug, subscriber_id=request.user.id
-            ).values_list("issue_id", flat=True),
-        ).count()
+        unread_notifications_count = (
+            Notification.objects.filter(
+                workspace__slug=slug,
+                receiver_id=request.user.id,
+                read_at__isnull=True,
+                archived_at__isnull=True,
+                snoozed_till__isnull=True,
+            )
+            .exclude(sender__icontains="mentioned")
+            .count()
+        )
 
-        # My Issues Count
-        my_issues_count = Notification.objects.filter(
+        mention_notifications_count = Notification.objects.filter(
             workspace__slug=slug,
             receiver_id=request.user.id,
             read_at__isnull=True,
             archived_at__isnull=True,
-            entity_identifier__in=IssueAssignee.objects.filter(
-                workspace__slug=slug, assignee_id=request.user.id
-            ).values_list("issue_id", flat=True),
-        ).count()
-
-        # Created Issues Count
-        created_issues_count = Notification.objects.filter(
-            workspace__slug=slug,
-            receiver_id=request.user.id,
-            read_at__isnull=True,
-            archived_at__isnull=True,
-            entity_identifier__in=Issue.objects.filter(
-                workspace__slug=slug, created_by=request.user
-            ).values_list("pk", flat=True),
+            snoozed_till__isnull=True,
+            sender__icontains="mentioned",
         ).count()
 
         return Response(
             {
-                "watching_issues": watching_issues_count,
-                "my_issues": my_issues_count,
-                "created_issues": created_issues_count,
+                "total_unread_notifications_count": int(
+                    unread_notifications_count
+                ),
+                "mention_unread_notifications_count": int(
+                    mention_notifications_count
+                ),
             },
             status=status.HTTP_200_OK,
         )

@@ -38,6 +38,7 @@ from plane.db.models import (
 from ..base import BaseAPIView, BaseViewSet
 
 from plane.bgtasks.page_transaction_task import page_transaction
+from plane.bgtasks.page_version_task import page_version
 
 
 def unarchive_archive_page_and_descendants(page_id, archived_at):
@@ -215,8 +216,14 @@ class PageViewSet(BaseViewSet):
                 status=status.HTTP_404_NOT_FOUND,
             )
         else:
+            issue_ids = PageLog.objects.filter(
+                page_id=pk, entity_name="issue"
+            ).values_list("entity_identifier", flat=True)
+            data = PageDetailSerializer(page).data
+            data["issue_ids"] = issue_ids
             return Response(
-                PageDetailSerializer(page).data, status=status.HTTP_200_OK
+                data,
+                status=status.HTTP_200_OK,
             )
 
     def lock(self, request, slug, project_id, pk):
@@ -238,6 +245,28 @@ class PageViewSet(BaseViewSet):
 
         return Response(status=status.HTTP_204_NO_CONTENT)
 
+    def access(self, request, slug, project_id, pk):
+        access = request.data.get("access", 0)
+        page = Page.objects.filter(
+            pk=pk, workspace__slug=slug, projects__id=project_id
+        ).first()
+
+        # Only update access if the page owner is the requesting user
+        if (
+            page.access != request.data.get("access", page.access)
+            and page.owned_by_id != request.user.id
+        ):
+            return Response(
+                {
+                    "error": "Access cannot be updated since this page is owned by someone else"
+                },
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        page.access = access
+        page.save()
+        return Response(status=status.HTTP_204_NO_CONTENT)
+
     def list(self, request, slug, project_id):
         queryset = self.get_queryset()
         pages = PageSerializer(queryset, many=True).data
@@ -304,27 +333,26 @@ class PageViewSet(BaseViewSet):
             pk=pk, workspace__slug=slug, projects__id=project_id
         )
 
-        # only the owner and admin can delete the page
-        if (
-            ProjectMember.objects.filter(
-                project_id=project_id,
-                member=request.user,
-                is_active=True,
-                role__gt=20,
-            ).exists()
-            or request.user.id != page.owned_by_id
-        ):
-            return Response(
-                {"error": "Only the owner and admin can delete the page"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
-
         if page.archived_at is None:
             return Response(
                 {"error": "The page should be archived before deleting"},
                 status=status.HTTP_400_BAD_REQUEST,
             )
 
+        if page.owned_by_id != request.user.id and (
+            not ProjectMember.objects.filter(
+                workspace__slug=slug,
+                member=request.user,
+                role=20,
+                project_id=project_id,
+                is_active=True,
+            ).exists()
+        ):
+            return Response(
+                {"error": "Only admin or owner can delete the page"},
+                status=status.HTTP_403_FORBIDDEN,
+            )
+
         # remove parent from all the children
         _ = Page.objects.filter(
             parent_id=pk, projects__id=project_id, workspace__slug=slug
@@ -358,7 +386,7 @@ class PageFavoriteViewSet(BaseViewSet):
             entity_identifier=pk,
             entity_type="page",
         )
-        page_favorite.delete()
+        page_favorite.delete(soft=False)
         return Response(status=status.HTTP_204_NO_CONTENT)

@@ -431,8 +459,12 @@ class PagesDescriptionViewSet(BaseViewSet):
     ]
 
     def retrieve(self, request, slug, project_id, pk):
-        page = Page.objects.get(
-            pk=pk, workspace__slug=slug, projects__id=project_id
+        page = (
+            Page.objects.filter(
+                pk=pk, workspace__slug=slug, projects__id=project_id
+            )
+            .filter(Q(owned_by=self.request.user) | Q(access=0))
+            .first()
         )
         binary_data = page.description_binary
 
@@ -451,20 +483,64 @@ class PagesDescriptionViewSet(BaseViewSet):
         return response
 
     def partial_update(self, request, slug, project_id, pk):
-        page = Page.objects.get(
-            pk=pk, workspace__slug=slug, projects__id=project_id
+        page = (
+            Page.objects.filter(
+                pk=pk, workspace__slug=slug, projects__id=project_id
+            )
+            .filter(Q(owned_by=self.request.user) | Q(access=0))
+            .first()
         )
 
+        if page is None:
+            return Response(
+                {"error": "Page not found"},
+                status=404,
+            )
+
+        if page.is_locked:
+            return Response(
+                {"error": "Page is locked"},
+                status=471,
+            )
+
+        if page.archived_at:
+            return Response(
+                {"error": "Page is archived"},
+                status=472,
+            )
+
+        # Serialize the existing instance
+        existing_instance = json.dumps(
+            {
+                "description_html": page.description_html,
+            },
+            cls=DjangoJSONEncoder,
+        )
+
         # Get the base64 data from the request
         base64_data = request.data.get("description_binary")
 
         # If base64 data is provided
         if base64_data:
             # Decode the base64 data to bytes
             new_binary_data = base64.b64decode(base64_data)
 
+            # capture the page transaction
+            if request.data.get("description_html"):
+                page_transaction.delay(
+                    new_value=request.data,
+                    old_value=existing_instance,
+                    page_id=pk,
+                )
             # Store the updated binary data
             page.description_binary = new_binary_data
             page.description_html = request.data.get("description_html")
             page.save()
             # Return a success response
+            page_version.delay(
+                page_id=page.id,
+                existing_instance=existing_instance,
+                user_id=request.user.id,
+            )
             return Response({"message": "Updated successfully"})
         else:
             return Response({"error": "No binary data provided"})

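The reworked partial_update treats the page body as an opaque binary payload (plausibly a collaborative-editor document) that travels as base64 text in JSON. The round trip it implies, sketched with an assumed payload:

    import base64

    # Client side (assumption): binary update encoded as ASCII-safe text.
    payload = base64.b64encode(b"\x01\x02binary-doc-update").decode("ascii")

    # Server side, as in the diff: decode back to bytes before storing.
    new_binary_data = base64.b64decode(payload)
    assert new_binary_data == b"\x01\x02binary-doc-update"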
apiserver/plane/app/views/page/version.py (new file, 37 lines)
@@ -0,0 +1,37 @@
+# Third party imports
+from rest_framework import status
+from rest_framework.response import Response
+
+# Module imports
+from plane.db.models import PageVersion
+from ..base import BaseAPIView
+from plane.app.permissions import ProjectEntityPermission
+from plane.app.serializers import PageVersionSerializer
+
+
+class PageVersionEndpoint(BaseAPIView):
+
+    permission_classes = [
+        ProjectEntityPermission,
+    ]
+
+    def get(self, request, slug, project_id, page_id, pk=None):
+        # Check if pk is provided
+        if pk:
+            # Return a single page version
+            page_version = PageVersion.objects.get(
+                workspace__slug=slug,
+                page_id=page_id,
+                pk=pk,
+            )
+            # Serialize the page version
+            serializer = PageVersionSerializer(page_version)
+            return Response(serializer.data, status=status.HTTP_200_OK)
+        # Return all page versions
+        page_versions = PageVersion.objects.filter(
+            workspace__slug=slug,
+            page_id=page_id,
+        )
+        # Serialize the page versions
+        serializer = PageVersionSerializer(page_versions, many=True)
+        return Response(serializer.data, status=status.HTTP_200_OK)
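The new endpoint folds list and detail into one GET handler by making pk optional. A hedged sketch of the two URL patterns that would feed it (route paths and names are assumptions, not taken from the PR):

    from django.urls import path

    urlpatterns = [
        # All versions of a page.
        path(
            "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/versions/",
            PageVersionEndpoint.as_view(),
            name="page-versions",
        ),
        # A single version.
        path(
            "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/versions/<uuid:pk>/",
            PageVersionEndpoint.as_view(),
            name="page-versions",
        ),
    ]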
@@ -39,7 +39,7 @@ from plane.db.models import (
     Cycle,
     Inbox,
     DeployBoard,
-    IssueProperty,
+    IssueUserProperty,
     Issue,
     Module,
     Project,
@@ -266,7 +266,7 @@ class ProjectViewSet(BaseViewSet):
                 role=20,
             )
             # Also create the issue property for the user
-            _ = IssueProperty.objects.create(
+            _ = IssueUserProperty.objects.create(
                 project_id=serializer.data["id"],
                 user=request.user,
             )
@@ -280,7 +280,7 @@ class ProjectViewSet(BaseViewSet):
                     role=20,
                 )
                 # Also create the issue property for the user
-                IssueProperty.objects.create(
+                IssueUserProperty.objects.create(
                     project_id=serializer.data["id"],
                     user_id=serializer.data["project_lead"],
                 )
@@ -342,6 +342,7 @@ class ProjectViewSet(BaseViewSet):
             .first()
         )
 
+        # Create the model activity
         model_activity.delay(
             model_name="project",
             model_id=str(project.id),
@@ -598,7 +599,7 @@ class ProjectFavoritesViewSet(BaseViewSet):
             user=request.user,
             workspace__slug=slug,
         )
-        project_favorite.delete()
+        project_favorite.delete(soft=False)
         return Response(status=status.HTTP_204_NO_CONTENT)


@@ -25,7 +25,7 @@ from plane.db.models import (
     ProjectMemberInvite,
     User,
     WorkspaceMember,
-    IssueProperty,
+    IssueUserProperty,
 )
 
 
@@ -179,9 +179,9 @@ class UserProjectInvitationsViewset(BaseViewSet):
                 ignore_conflicts=True,
             )
 
-            IssueProperty.objects.bulk_create(
+            IssueUserProperty.objects.bulk_create(
                 [
-                    IssueProperty(
+                    IssueUserProperty(
                         project_id=project_id,
                         user=request.user,
                         workspace=workspace,
@@ -22,8 +22,10 @@ from plane.db.models import (
     ProjectMember,
     Workspace,
     TeamMember,
-    IssueProperty,
+    IssueUserProperty,
 )
+from plane.bgtasks.project_add_user_email_task import project_add_user_email
+from plane.utils.host import base_host
 
 
 class ProjectMemberViewSet(BaseViewSet):
@@ -64,33 +66,29 @@ class ProjectMemberViewSet(BaseViewSet):
         )
 
     def create(self, request, slug, project_id):
+        # Get the list of members to be added to the project and their roles i.e. the user_id and the role
         members = request.data.get("members", [])
 
+        # get the project
         project = Project.objects.get(pk=project_id, workspace__slug=slug)
 
+        # Check if the members array is empty
         if not len(members):
             return Response(
                 {"error": "Atleast one member is required"},
                 status=status.HTTP_400_BAD_REQUEST,
             )
+
+        # Initialize the bulk arrays
+        bulk_project_members = []
         bulk_issue_props = []
 
-        project_members = (
-            ProjectMember.objects.filter(
-                workspace__slug=slug,
-                member_id__in=[member.get("member_id") for member in members],
-            )
-            .values("member_id", "sort_order")
-            .order_by("sort_order")
-        )
-
-        bulk_project_members = []
         # Create a dictionary of the member_id and their roles
         member_roles = {
             member.get("member_id"): member.get("role") for member in members
         }
-        # Update roles in the members array based on the member_roles dictionary
+
+        # Update roles in the members array based on the member_roles dictionary and set is_active to True
         for project_member in ProjectMember.objects.filter(
             project_id=project_id,
             member_id__in=[member.get("member_id") for member in members],
@@ -104,13 +102,27 @@ class ProjectMemberViewSet(BaseViewSet):
             bulk_project_members, ["is_active", "role"], batch_size=100
         )
 
+        # Get the list of project members of the requested workspace with the given slug
+        project_members = (
+            ProjectMember.objects.filter(
+                workspace__slug=slug,
+                member_id__in=[member.get("member_id") for member in members],
+            )
+            .values("member_id", "sort_order")
+            .order_by("sort_order")
+        )
+
+        # Loop through requested members
         for member in members:
+            # Get the sort orders of the member
             sort_order = [
                 project_member.get("sort_order")
                 for project_member in project_members
                 if str(project_member.get("member_id"))
                 == str(member.get("member_id"))
             ]
+            # Create a new project member
             bulk_project_members.append(
                 ProjectMember(
                     member_id=member.get("member_id"),
@@ -122,21 +134,23 @@ class ProjectMemberViewSet(BaseViewSet):
                     ),
                 )
             )
+            # Create a new issue property
             bulk_issue_props.append(
-                IssueProperty(
+                IssueUserProperty(
                     user_id=member.get("member_id"),
                     project_id=project_id,
                     workspace_id=project.workspace_id,
                 )
             )
 
+        # Bulk create the project members and issue properties
         project_members = ProjectMember.objects.bulk_create(
             bulk_project_members,
             batch_size=10,
             ignore_conflicts=True,
         )
 
-        _ = IssueProperty.objects.bulk_create(
+        _ = IssueUserProperty.objects.bulk_create(
             bulk_issue_props, batch_size=10, ignore_conflicts=True
         )
 
@@ -144,7 +158,18 @@ class ProjectMemberViewSet(BaseViewSet):
             project_id=project_id,
             member_id__in=[member.get("member_id") for member in members],
         )
+        # Send emails to notify the users
+        [
+            project_add_user_email.delay(
+                base_host(request=request, is_app=True),
+                project_member.id,
+                request.user.id,
+            )
+            for project_member in project_members
+        ]
+        # Serialize the project members
         serializer = ProjectMemberRoleSerializer(project_members, many=True)
+        # Return the serialized data
         return Response(serializer.data, status=status.HTTP_201_CREATED)
 
     def list(self, request, slug, project_id):
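Two details of the rewritten create flow are easy to miss. First, bulk_create(..., ignore_conflicts=True) turns duplicate-key violations into silent skips, so re-adding an existing member cannot abort the whole batch; the trade-off is that skipped rows come back without primary keys on most backends, which plausibly explains why the code re-queries ProjectMember before fanning out emails. Second, the email fan-out is a list comprehension used purely for side effects; written as a plain loop for clarity:

    # Re-fetch persisted members (bulk_create results may lack pks),
    # then queue one email task per member.
    project_members = ProjectMember.objects.filter(
        project_id=project_id,
        member_id__in=[member.get("member_id") for member in members],
    )
    for project_member in project_members:
        project_add_user_email.delay(
            base_host(request=request, is_app=True),
            project_member.id,
            request.user.id,
        )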
@@ -298,7 +323,7 @@ class AddTeamToProjectEndpoint(BaseAPIView):
                 )
             )
             issue_props.append(
-                IssueProperty(
+                IssueUserProperty(
                     project_id=project_id,
                     user_id=member,
                     workspace=workspace,
@@ -310,7 +335,7 @@ class AddTeamToProjectEndpoint(BaseAPIView):
             project_members, batch_size=10, ignore_conflicts=True
         )
 
-        _ = IssueProperty.objects.bulk_create(
+        _ = IssueUserProperty.objects.bulk_create(
             issue_props, batch_size=10, ignore_conflicts=True
         )

@@ -1,5 +1,4 @@
 # Python imports
-import re
 
 # Django imports
 from django.db.models import Q
@@ -11,13 +10,7 @@ from rest_framework.response import Response
 # Module imports
 from .base import BaseAPIView
 from plane.db.models import (
     Workspace,
     Project,
     Issue,
     Cycle,
     Module,
     Page,
     IssueView,
 )
 from plane.utils.issue_search import search_issues
@@ -59,8 +52,14 @@ class IssueSearchEndpoint(BaseAPIView):
             issue = Issue.issue_objects.get(pk=issue_id)
             issues = issues.filter(
                 ~Q(pk=issue_id),
-                ~Q(issue_related__issue=issue),
-                ~Q(issue_relation__related_issue=issue),
+                ~Q(
+                    issue_related__issue=issue,
+                    issue_related__deleted_at__isnull=True,
+                ),
+                ~Q(
+                    issue_relation__related_issue=issue,
+                    issue_related__deleted_at__isnull=True,
+                ),
             )
         if sub_issue == "true" and issue_id:
             issue = Issue.issue_objects.get(pk=issue_id)

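The widened ~Q(...) clauses above make the exclusion conditional on the relation being live: a candidate issue is only filtered out when the linking row has deleted_at IS NULL, so soft-deleted relations no longer hide issues from relation search. One detail worth flagging: the second clause pairs issue_relation__related_issue with issue_related__deleted_at rather than issue_relation__deleted_at, which may be intentional or a slip; only the surrounding code can say. The intended predicate, as a sketch:

    from django.db.models import Q

    issues = issues.filter(
        ~Q(pk=issue_id),
        # Exclude only issues whose relation row is not soft-deleted.
        ~Q(
            issue_related__issue=issue,
            issue_related__deleted_at__isnull=True,
        ),
    )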
@@ -35,6 +35,11 @@ from plane.license.models import Instance, InstanceAdmin
 from plane.utils.cache import cache_response, invalidate_cache
 from plane.utils.paginator import BasePaginator
 from plane.authentication.utils.host import user_ip
+from plane.bgtasks.user_deactivation_email_task import user_deactivation_email
+from plane.utils.host import base_host
+from django.utils.decorators import method_decorator
+from django.views.decorators.cache import cache_control
+from django.views.decorators.vary import vary_on_cookie
 
 
 class UserEndpoint(BaseViewSet):
@@ -45,6 +50,8 @@ class UserEndpoint(BaseViewSet):
         return self.request.user
 
     @cache_response(60 * 60)
+    @method_decorator(cache_control(private=True, max_age=12))
+    @method_decorator(vary_on_cookie)
     def retrieve(self, request):
         serialized_data = UserMeSerializer(request.user).data
         return Response(
@@ -53,6 +60,8 @@ class UserEndpoint(BaseViewSet):
         )
 
     @cache_response(60 * 60)
+    @method_decorator(cache_control(private=True, max_age=12))
+    @method_decorator(vary_on_cookie)
     def retrieve_user_settings(self, request):
         serialized_data = UserMeSettingsSerializer(request.user).data
         return Response(serialized_data, status=status.HTTP_200_OK)
@@ -77,6 +86,9 @@ class UserEndpoint(BaseViewSet):
         return super().partial_update(request, *args, **kwargs)
 
     @invalidate_cache(path="/api/users/me/")
+    @invalidate_cache(
+        path="/api/users/me/workspaces/", multiple=True, user=False
+    )
     def deactivate(self, request):
         # Check all workspace user is active
         user = self.get_object()
@@ -192,6 +204,11 @@ class UserEndpoint(BaseViewSet):
         user.last_logout_time = timezone.now()
         user.save()
 
+        # Send an email to the user
+        user_deactivation_email.delay(
+            base_host(request=request, is_app=True), user.id
+        )
+
         # Logout the user
         logout(request)
         return Response(status=status.HTTP_204_NO_CONTENT)
@@ -281,6 +298,8 @@ class AccountEndpoint(BaseAPIView):
 
 
 class ProfileEndpoint(BaseAPIView):
+    @method_decorator(cache_control(private=True, max_age=12))
+    @method_decorator(vary_on_cookie)
     def get(self, request):
         profile = Profile.objects.get(user=request.user)
         serializer = ProfileSerializer(profile)

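The decorator stack added here layers three caches: cache_response keeps a server-side copy of the serialized body, cache_control(private=True, max_age=12) lets only the browser (not shared proxies) reuse the response for 12 seconds, and vary_on_cookie keys that browser cache on the session cookie so one user's cached profile is never replayed to another. Sketched on a bare view:

    from django.utils.decorators import method_decorator
    from django.views.decorators.cache import cache_control
    from django.views.decorators.vary import vary_on_cookie
    from rest_framework.response import Response

    class ExampleEndpoint(BaseAPIView):  # illustrative endpoint, not from the PR
        @method_decorator(cache_control(private=True, max_age=12))
        @method_decorator(vary_on_cookie)
        def get(self, request):
            return Response({"ok": True})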
@@ -14,6 +14,7 @@ from django.db.models.functions import Coalesce
 from django.utils.decorators import method_decorator
 from django.views.decorators.gzip import gzip_page
 from rest_framework import status
+from django.db import transaction
 
 # Third party imports
 from rest_framework.response import Response
@@ -31,6 +32,8 @@ from plane.db.models import (
     IssueLink,
     IssueView,
     Workspace,
+    WorkspaceMember,
+    ProjectMember,
 )
 from plane.utils.grouper import (
     issue_group_values,
@@ -76,32 +79,74 @@ class WorkspaceViewViewSet(BaseViewSet):
         )
 
+    def partial_update(self, request, slug, pk):
+        with transaction.atomic():
+            workspace_view = IssueView.objects.select_for_update().get(
+                pk=pk,
+                workspace__slug=slug,
+            )
+
+            if workspace_view.is_locked:
+                return Response(
+                    {"error": "view is locked"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            # Only update the view if owner is updating
+            if workspace_view.owned_by_id != request.user.id:
+                return Response(
+                    {
+                        "error": "Only the owner of the view can update the view"
+                    },
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            serializer = IssueViewSerializer(
+                workspace_view, data=request.data, partial=True
+            )
+
+            if serializer.is_valid():
+                serializer.save()
+                return Response(serializer.data, status=status.HTTP_200_OK)
+            return Response(
+                serializer.errors, status=status.HTTP_400_BAD_REQUEST
+            )
+
     def destroy(self, request, slug, pk):
         workspace_view = IssueView.objects.get(
             pk=pk,
             workspace__slug=slug,
         )
 
-        if workspace_view.is_locked:
+        if not (
+            WorkspaceMember.objects.filter(
+                workspace__slug=slug,
+                member=request.user,
+                role=20,
+                is_active=True,
+            ).exists()
+            and workspace_view.owned_by_id != request.user.id
+        ):
             return Response(
-                {"error": "view is locked"},
-                status=status.HTTP_400_BAD_REQUEST,
+                {"error": "You do not have permission to delete this view"},
+                status=status.HTTP_403_FORBIDDEN,
             )
 
-        # Only update the view if owner is updating
-        if workspace_view.owned_by_id != request.user.id:
-            return Response(
-                {"error": "Only the owner of the view can update the view"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
-
-        serializer = IssueViewSerializer(
-            workspace_view, data=request.data, partial=True
-        )
+        workspace_member = WorkspaceMember.objects.filter(
+            workspace__slug=slug,
+            member=request.user,
+            role=20,
+            is_active=True,
+        )
 
-        if serializer.is_valid():
-            serializer.save()
-            return Response(serializer.data, status=status.HTTP_200_OK)
-        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+        if (
+            workspace_member.exists()
+            or workspace_view.owned_by == request.user
+        ):
+            workspace_view.delete()
+        else:
+            return Response(
+                {"error": "Only admin or owner can delete the view"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+        return Response(status=status.HTTP_204_NO_CONTENT)
 
 
 class WorkspaceViewIssuesViewSet(BaseViewSet):

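The select_for_update() call is the heart of this rewrite: inside transaction.atomic() it takes a row-level lock, so two concurrent PATCHes to the same view serialize instead of interleaving their reads and writes (a lost-update bug for JSON filter payloads). Calling select_for_update() outside an open transaction raises TransactionManagementError, which is why the whole method body moves under the with block. Reduced to its skeleton:

    from django.db import transaction

    with transaction.atomic():
        # Blocks other select_for_update() callers on this row
        # until the transaction commits or rolls back.
        view = IssueView.objects.select_for_update().get(pk=pk)
        view.name = "renamed"  # illustrative field update
        view.save()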
@@ -344,31 +389,60 @@ class IssueViewViewSet(BaseViewSet):
         return Response(views, status=status.HTTP_200_OK)
 
     def partial_update(self, request, slug, project_id, pk):
-        issue_view = IssueView.objects.get(
-            pk=pk, workspace__slug=slug, project_id=project_id
-        )
-
-        if issue_view.is_locked:
-            return Response(
-                {"error": "view is locked"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
-
-        # Only update the view if owner is updating
-        if issue_view.owned_by_id != request.user.id:
-            return Response(
-                {"error": "Only the owner of the view can update the view"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
-
-        serializer = IssueViewSerializer(
-            issue_view, data=request.data, partial=True
-        )
-
-        if serializer.is_valid():
-            serializer.save()
-            return Response(serializer.data, status=status.HTTP_200_OK)
-        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+        with transaction.atomic():
+            issue_view = IssueView.objects.select_for_update().get(
+                pk=pk, workspace__slug=slug, project_id=project_id
+            )
+
+            if issue_view.is_locked:
+                return Response(
+                    {"error": "view is locked"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            # Only update the view if owner is updating
+            if issue_view.owned_by_id != request.user.id:
+                return Response(
+                    {
+                        "error": "Only the owner of the view can update the view"
+                    },
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            serializer = IssueViewSerializer(
+                issue_view, data=request.data, partial=True
+            )
+
+            if serializer.is_valid():
+                serializer.save()
+                return Response(serializer.data, status=status.HTTP_200_OK)
+            return Response(
+                serializer.errors, status=status.HTTP_400_BAD_REQUEST
+            )
+
+    def destroy(self, request, slug, project_id, pk):
+        project_view = IssueView.objects.get(
+            pk=pk,
+            project_id=project_id,
+            workspace__slug=slug,
+        )
+        if (
+            ProjectMember.objects.filter(
+                workspace__slug=slug,
+                project_id=project_id,
+                member=request.user,
+                role=20,
+                is_active=True,
+            ).exists()
+            or project_view.owned_by_id == request.user.id
+        ):
+            project_view.delete()
+        else:
+            return Response(
+                {"error": "Only admin or owner can delete the view"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+        return Response(status=status.HTTP_204_NO_CONTENT)

 class IssueViewFavoriteViewSet(BaseViewSet):
@@ -400,5 +474,5 @@ class IssueViewFavoriteViewSet(BaseViewSet):
             entity_type="view",
             entity_identifier=view_id,
         )
-        view_favorite.delete()
+        view_favorite.delete(soft=False)
         return Response(status=status.HTTP_204_NO_CONTENT)

@@ -44,6 +44,10 @@ from plane.db.models import (
     WorkspaceTheme,
 )
 from plane.utils.cache import cache_response, invalidate_cache
+from django.utils.decorators import method_decorator
+from django.views.decorators.cache import cache_control
+from django.views.decorators.vary import vary_on_cookie
+from plane.utils.constants import RESTRICTED_WORKSPACE_SLUGS
 
 
 class WorkSpaceViewSet(BaseViewSet):
@@ -118,7 +122,7 @@ class WorkSpaceViewSet(BaseViewSet):
                     status=status.HTTP_400_BAD_REQUEST,
                 )
 
-            if serializer.is_valid():
+            if serializer.is_valid(raise_exception=True):
                 serializer.save(owner=request.user)
                 # Create Workspace member
                 _ = WorkspaceMember.objects.create(
@@ -171,6 +175,8 @@ class UserWorkSpacesEndpoint(BaseAPIView):
     ]
 
     @cache_response(60 * 60 * 2)
+    @method_decorator(cache_control(private=True, max_age=12))
+    @method_decorator(vary_on_cookie)
     def get(self, request):
         fields = [
             field
@@ -231,7 +237,10 @@ class WorkSpaceAvailabilityCheckEndpoint(BaseAPIView):
                 status=status.HTTP_400_BAD_REQUEST,
             )
 
-        workspace = Workspace.objects.filter(slug=slug).exists()
+        workspace = (
+            Workspace.objects.filter(slug=slug).exists()
+            or slug in RESTRICTED_WORKSPACE_SLUGS
+        )
         return Response({"status": not workspace}, status=status.HTTP_200_OK)

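The availability check now treats reserved slugs exactly like taken ones, so a workspace cannot shadow an application route. RESTRICTED_WORKSPACE_SLUGS lives in plane.utils.constants and its contents are not shown in this diff; the values below are illustrative assumptions only:

    RESTRICTED_WORKSPACE_SLUGS = ["api", "admin", "create-workspace"]  # assumed examples

    def slug_is_taken(slug: str) -> bool:
        # A slug is unavailable if a workspace owns it or it is reserved.
        return (
            Workspace.objects.filter(slug=slug).exists()
            or slug in RESTRICTED_WORKSPACE_SLUGS
        )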
apiserver/plane/app/views/workspace/favorite.py (new file, 88 lines)
@@ -0,0 +1,88 @@
+# Third party modules
+from rest_framework import status
+from rest_framework.response import Response
+
+# Django modules
+from django.db.models import Q
+
+# Module imports
+from plane.app.views.base import BaseAPIView
+from plane.db.models import UserFavorite, Workspace
+from plane.app.serializers import UserFavoriteSerializer
+from plane.app.permissions import WorkspaceEntityPermission
+
+
+class WorkspaceFavoriteEndpoint(BaseAPIView):
+    permission_classes = [
+        WorkspaceEntityPermission,
+    ]
+
+    def get(self, request, slug):
+        # the second filter is to check if the user is a member of the project
+        favorites = UserFavorite.objects.filter(
+            user=request.user,
+            workspace__slug=slug,
+            parent__isnull=True,
+        ).filter(
+            Q(project__isnull=True)
+            | (
+                Q(project__isnull=False)
+                & Q(project__project_projectmember__member=request.user)
+                & Q(project__project_projectmember__is_active=True)
+            )
+        )
+        serializer = UserFavoriteSerializer(favorites, many=True)
+        return Response(serializer.data, status=status.HTTP_200_OK)
+
+    def post(self, request, slug):
+        workspace = Workspace.objects.get(slug=slug)
+        serializer = UserFavoriteSerializer(data=request.data)
+        if serializer.is_valid():
+            serializer.save(
+                user_id=request.user.id,
+                workspace=workspace,
+                project_id=request.data.get("project_id", None),
+            )
+            return Response(serializer.data, status=status.HTTP_200_OK)
+        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+    def patch(self, request, slug, favorite_id):
+        favorite = UserFavorite.objects.get(
+            user=request.user, workspace__slug=slug, pk=favorite_id
+        )
+        serializer = UserFavoriteSerializer(
+            favorite, data=request.data, partial=True
+        )
+        if serializer.is_valid():
+            serializer.save()
+            return Response(serializer.data, status=status.HTTP_200_OK)
+        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+    def delete(self, request, slug, favorite_id):
+        favorite = UserFavorite.objects.get(
+            user=request.user, workspace__slug=slug, pk=favorite_id
+        )
+        favorite.delete(soft=False)
+        return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class WorkspaceFavoriteGroupEndpoint(BaseAPIView):
+    permission_classes = [
+        WorkspaceEntityPermission,
+    ]
+
+    def get(self, request, slug, favorite_id):
+        favorites = UserFavorite.objects.filter(
+            user=request.user,
+            workspace__slug=slug,
+            parent_id=favorite_id,
+        ).filter(
+            Q(project__isnull=True)
+            | (
+                Q(project__isnull=False)
+                & Q(project__project_projectmember__member=request.user)
+                & Q(project__project_projectmember__is_active=True)
+            )
+        )
+        serializer = UserFavoriteSerializer(favorites, many=True)
+        return Response(serializer.data, status=status.HTTP_200_OK)
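The two endpoints above split favorites into roots and folder contents: the first returns only parent__isnull=True rows, the second only the children of one favorite, which implies a self-referential parent foreign key on UserFavorite. A usage sketch of the client flow (variable names assumed):

    # Top-level favorites and folders for the sidebar.
    roots = UserFavorite.objects.filter(
        user=user, workspace__slug=slug, parent__isnull=True
    )
    # Lazily fetch a folder's children when it is expanded.
    children = UserFavorite.objects.filter(
        user=user, workspace__slug=slug, parent_id=folder_id
    )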
@@ -18,6 +18,8 @@ from plane.db.models import (
 )
 from plane.license.utils.instance_value import get_configuration_value
 from .error import AuthenticationException, AUTHENTICATION_ERROR_CODES
+from plane.bgtasks.user_activation_email_task import user_activation_email
+from plane.authentication.utils.host import base_host
 
 
 class Adapter:
@@ -120,6 +122,13 @@ class Adapter:
         user.last_login_ip = self.request.META.get("REMOTE_ADDR")
         user.last_login_uagent = self.request.META.get("HTTP_USER_AGENT")
         user.token_updated_at = timezone.now()
+        # If user is not active, send the activation email and set the user as active
+        if not user.is_active:
+            user_activation_email.delay(
+                base_host(request=self.request), user.id
+            )
+            # Set user as active
+            user.is_active = True
         user.save()
         return user
 
@@ -168,12 +177,6 @@ class Adapter:
         # Create profile
         Profile.objects.create(user=user)
 
-        if not user.is_active:
-            raise AuthenticationException(
-                AUTHENTICATION_ERROR_CODES["USER_ACCOUNT_DEACTIVATED"],
-                error_message="USER_ACCOUNT_DEACTIVATED",
-            )
-
         # Save user data
         user = self.save_user_data(user=user)

Some files were not shown because too many files have changed in this diff.