Mirror of https://github.com/makeplane/plane (synced 2025-08-07 19:59:33 +00:00)

Compare commits: feat/edito ... refactor/l (517 commits)
@@ -8,6 +8,13 @@ PGDATA="/var/lib/postgresql/data"
 REDIS_HOST="plane-redis"
 REDIS_PORT="6379"
+
+# RabbitMQ Settings
+RABBITMQ_HOST="plane-mq"
+RABBITMQ_PORT="5672"
+RABBITMQ_USER="plane"
+RABBITMQ_PASSWORD="plane"
+RABBITMQ_VHOST="plane"

 # AWS Settings
 AWS_REGION=""
 AWS_ACCESS_KEY_ID="access-key"
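The RabbitMQ block added above is normally consumed as a single AMQP broker URL. A minimal sketch of that composition in Python, assuming a Celery-style consumer reads these variables; the helper name and fallback defaults are illustrative, not part of the diff:

```python
import os

def rabbitmq_broker_url() -> str:
    """Compose an AMQP URL from the RABBITMQ_* variables introduced above."""
    host = os.environ.get("RABBITMQ_HOST", "plane-mq")
    port = os.environ.get("RABBITMQ_PORT", "5672")
    user = os.environ.get("RABBITMQ_USER", "plane")
    password = os.environ.get("RABBITMQ_PASSWORD", "plane")
    vhost = os.environ.get("RABBITMQ_VHOST", "plane")
    return f"amqp://{user}:{password}@{host}:{port}/{vhost}"

# With the example values above: amqp://plane:plane@plane-mq:5672/plane
```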
.github/ISSUE_TEMPLATE/--bug-report.yaml (vendored, 2 changes)

@@ -2,7 +2,7 @@ name: Bug report
 description: Create a bug report to help us improve Plane
 title: "[bug]: "
 labels: [🐛bug]
-assignees: [srinivaspendem, pushya22]
+assignees: [vihar, pushya22]
 body:
   - type: markdown
     attributes:

@@ -2,7 +2,7 @@ name: Feature request
 description: Suggest a feature to improve Plane
 title: "[feature]: "
 labels: [✨feature]
-assignees: [srinivaspendem, pushya22]
+assignees: [vihar, pushya22]
 body:
   - type: markdown
     attributes:
.github/workflows/build-aio-base.yml (vendored, 102 changes)

@@ -2,6 +2,11 @@ name: Build AIO Base Image

 on:
   workflow_dispatch:
+    inputs:
+      base_tag_name:
+        description: 'Base Tag Name'
+        required: false
+        default: ''

 env:
   TARGET_BRANCH: ${{ github.ref_name }}
@@ -16,37 +21,46 @@ jobs:
       gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
       gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
       gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
-      build_base: ${{ steps.changed_files.outputs.base_any_changed }}
+      image_tag: ${{ steps.set_env_variables.outputs.IMAGE_TAG }}

     steps:
       - id: set_env_variables
         name: Set Environment Variables
         run: |
-          echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
-          echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
-          echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
-          echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT
-          echo "TARGET_BRANCH=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT
+
+          if [ "${{ github.event.inputs.base_tag_name }}" != "" ]; then
+            echo "IMAGE_TAG=${{ github.event.inputs.base_tag_name }}" >> $GITHUB_OUTPUT
+          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
+            echo "IMAGE_TAG=latest" >> $GITHUB_OUTPUT
+          elif [ "${{ env.TARGET_BRANCH }}" == "preview" ]; then
+            echo "IMAGE_TAG=preview" >> $GITHUB_OUTPUT
+          else
+            echo "IMAGE_TAG=develop" >> $GITHUB_OUTPUT
+          fi
+
+
+          if [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
+            echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
+            echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
+            echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
+            echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT
+          else
+            echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
+            echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
+            echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
+            echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
+          fi

       - id: checkout_files
         name: Checkout Files
         uses: actions/checkout@v4

-      - name: Get changed files
-        id: changed_files
-        uses: tj-actions/changed-files@v42
-        with:
-          files_yaml: |
-            base:
-              - aio/Dockerfile.base

-  base_build_push:
-    if: ${{ needs.base_build_setup.outputs.build_base == 'true' || github.event_name == 'workflow_dispatch' || needs.base_build_setup.outputs.gh_branch_name == 'master' }}
+  full_base_build_push:
     runs-on: ubuntu-latest
     needs: [base_build_setup]
     env:
-      BASE_IMG_TAG: makeplane/plane-aio-base:${{ needs.base_build_setup.outputs.gh_branch_name }}
-      TARGET_BRANCH: ${{ needs.base_build_setup.outputs.gh_branch_name }}
+      BASE_IMG_TAG: makeplane/plane-aio-base:full-${{ needs.base_build_setup.outputs.image_tag }}
      BUILDX_DRIVER: ${{ needs.base_build_setup.outputs.gh_buildx_driver }}
      BUILDX_VERSION: ${{ needs.base_build_setup.outputs.gh_buildx_version }}
      BUILDX_PLATFORMS: ${{ needs.base_build_setup.outputs.gh_buildx_platforms }}
@@ -55,15 +69,6 @@ jobs:
       - name: Check out the repo
         uses: actions/checkout@v4

-      - name: Set Docker Tag
-        run: |
-          if [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
-            TAG=makeplane/plane-aio-base:latest
-          else
-            TAG=${{ env.BASE_IMG_TAG }}
-          fi
-          echo "BASE_IMG_TAG=${TAG}" >> $GITHUB_ENV
-
       - name: Login to Docker Hub
         uses: docker/login-action@v3
         with:
@@ -81,10 +86,53 @@ jobs:
         uses: docker/build-push-action@v5.1.0
         with:
           context: ./aio
-          file: ./aio/Dockerfile.base
+          file: ./aio/Dockerfile-base-full
           platforms: ${{ env.BUILDX_PLATFORMS }}
           tags: ${{ env.BASE_IMG_TAG }}
           push: true
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
         env:
           DOCKER_BUILDKIT: 1
           DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
           DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
+  slim_base_build_push:
+    runs-on: ubuntu-latest
+    needs: [base_build_setup]
+    env:
+      BASE_IMG_TAG: makeplane/plane-aio-base:slim-${{ needs.base_build_setup.outputs.image_tag }}
+      BUILDX_DRIVER: ${{ needs.base_build_setup.outputs.gh_buildx_driver }}
+      BUILDX_VERSION: ${{ needs.base_build_setup.outputs.gh_buildx_version }}
+      BUILDX_PLATFORMS: ${{ needs.base_build_setup.outputs.gh_buildx_platforms }}
+      BUILDX_ENDPOINT: ${{ needs.base_build_setup.outputs.gh_buildx_endpoint }}
+    steps:
+      - name: Check out the repo
+        uses: actions/checkout@v4
+
+      - name: Login to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+        with:
+          driver: ${{ env.BUILDX_DRIVER }}
+          version: ${{ env.BUILDX_VERSION }}
+          endpoint: ${{ env.BUILDX_ENDPOINT }}
+
+      - name: Build and Push to Docker Hub
+        uses: docker/build-push-action@v5.1.0
+        with:
+          context: ./aio
+          file: ./aio/Dockerfile-base-slim
+          platforms: ${{ env.BUILDX_PLATFORMS }}
+          tags: ${{ env.BASE_IMG_TAG }}
+          push: true
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
+        env:
+          DOCKER_BUILDKIT: 1
+          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
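The IMAGE_TAG selection above is plain shell, so its precedence is easy to state outside CI: an explicit base_tag_name input wins, then master maps to latest, preview to preview, and any other branch to develop. A sketch of the same rules in Python; the function is illustrative only:

```python
def resolve_image_tag(base_tag_input: str, target_branch: str) -> str:
    """Mirror the IMAGE_TAG precedence used in build-aio-base.yml above."""
    if base_tag_input:          # manual workflow_dispatch input wins
        return base_tag_input
    if target_branch == "master":
        return "latest"
    if target_branch == "preview":
        return "preview"
    return "develop"

assert resolve_image_tag("", "master") == "latest"
assert resolve_image_tag("v1.2.3", "master") == "v1.2.3"
```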
.github/workflows/build-aio-branch.yml (vendored, new file, 207 lines)

@@ -0,0 +1,207 @@
+name: Branch Build AIO
+
+on:
+  workflow_dispatch:
+    inputs:
+      full:
+        description: 'Run full build'
+        type: boolean
+        required: false
+        default: false
+      slim:
+        description: 'Run slim build'
+        type: boolean
+        required: false
+        default: false
+      base_tag_name:
+        description: 'Base Tag Name'
+        required: false
+        default: ''
+  release:
+    types: [released, prereleased]
+
+env:
+  TARGET_BRANCH: ${{ github.ref_name || github.event.release.target_commitish }}
+  FULL_BUILD_INPUT: ${{ github.event.inputs.full }}
+  SLIM_BUILD_INPUT: ${{ github.event.inputs.slim }}
+
+jobs:
+  branch_build_setup:
+    name: Build Setup
+    runs-on: ubuntu-latest
+    outputs:
+      gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
+      flat_branch_name: ${{ steps.set_env_variables.outputs.FLAT_BRANCH_NAME }}
+      gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
+      gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
+      gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
+      gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
+      aio_base_tag: ${{ steps.set_env_variables.outputs.AIO_BASE_TAG }}
+      do_full_build: ${{ steps.set_env_variables.outputs.DO_FULL_BUILD }}
+      do_slim_build: ${{ steps.set_env_variables.outputs.DO_SLIM_BUILD }}
+
+    steps:
+      - id: set_env_variables
+        name: Set Environment Variables
+        run: |
+          if [ "${{ env.TARGET_BRANCH }}" == "master" ] || [ "${{ github.event_name }}" == "release" ]; then
+            echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
+            echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
+            echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
+            echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT
+
+            echo "AIO_BASE_TAG=latest" >> $GITHUB_OUTPUT
+          else
+            echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
+            echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
+            echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
+            echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
+
+            if [ "${{ github.event_name}}" == "workflow_dispatch" ] && [ "${{ github.event.inputs.base_tag_name }}" != "" ]; then
+              echo "AIO_BASE_TAG=${{ github.event.inputs.base_tag_name }}" >> $GITHUB_OUTPUT
+            elif [ "${{ env.TARGET_BRANCH }}" == "preview" ]; then
+              echo "AIO_BASE_TAG=preview" >> $GITHUB_OUTPUT
+            else
+              echo "AIO_BASE_TAG=develop" >> $GITHUB_OUTPUT
+            fi
+          fi
+          echo "TARGET_BRANCH=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT
+
+          if [ "${{ env.FULL_BUILD_INPUT }}" == "true" ] || [ "${{github.event_name}}" == "push" ] || [ "${{github.event_name}}" == "release" ]; then
+            echo "DO_FULL_BUILD=true" >> $GITHUB_OUTPUT
+          else
+            echo "DO_FULL_BUILD=false" >> $GITHUB_OUTPUT
+          fi
+
+          if [ "${{ env.SLIM_BUILD_INPUT }}" == "true" ] || [ "${{github.event_name}}" == "push" ] || [ "${{github.event_name}}" == "release" ]; then
+            echo "DO_SLIM_BUILD=true" >> $GITHUB_OUTPUT
+          else
+            echo "DO_SLIM_BUILD=false" >> $GITHUB_OUTPUT
+          fi
+
+          FLAT_BRANCH_NAME=$(echo "${{ env.TARGET_BRANCH }}" | sed 's/[^a-zA-Z0-9]/-/g')
+          echo "FLAT_BRANCH_NAME=$FLAT_BRANCH_NAME" >> $GITHUB_OUTPUT
+
+      - id: checkout_files
+        name: Checkout Files
+        uses: actions/checkout@v4
+
+  full_build_push:
+    if: ${{ needs.branch_build_setup.outputs.do_full_build == 'true' }}
+    runs-on: ubuntu-20.04
+    needs: [branch_build_setup]
+    env:
+      BUILD_TYPE: full
+      AIO_BASE_TAG: ${{ needs.branch_build_setup.outputs.aio_base_tag }}
+      AIO_IMAGE_TAGS: makeplane/plane-aio:full-${{ needs.branch_build_setup.outputs.flat_branch_name }}
+      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
+      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
+    steps:
+      - name: Set Docker Tag
+        run: |
+          if [ "${{ github.event_name }}" == "release" ]; then
+            TAG=makeplane/plane-aio:${{env.BUILD_TYPE}}-stable,makeplane/plane-aio:${{env.BUILD_TYPE}}-${{ github.event.release.tag_name }}
+          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
+            TAG=makeplane/plane-aio:${{env.BUILD_TYPE}}-latest
+          else
+            TAG=${{ env.AIO_IMAGE_TAGS }}
+          fi
+          echo "AIO_IMAGE_TAGS=${TAG}" >> $GITHUB_ENV
+
+      - name: Login to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+        with:
+          driver: ${{ env.BUILDX_DRIVER }}
+          version: ${{ env.BUILDX_VERSION }}
+          endpoint: ${{ env.BUILDX_ENDPOINT }}
+
+      - name: Check out the repo
+        uses: actions/checkout@v4
+
+      - name: Build and Push to Docker Hub
+        uses: docker/build-push-action@v5.1.0
+        with:
+          context: .
+          file: ./aio/Dockerfile-app
+          platforms: ${{ env.BUILDX_PLATFORMS }}
+          tags: ${{ env.AIO_IMAGE_TAGS }}
+          push: true
+          build-args: |
+            BASE_TAG=${{ env.AIO_BASE_TAG }}
+            BUILD_TYPE=${{env.BUILD_TYPE}}
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
+
+        env:
+          DOCKER_BUILDKIT: 1
+          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
+  slim_build_push:
+    if: ${{ needs.branch_build_setup.outputs.do_slim_build == 'true' }}
+    runs-on: ubuntu-20.04
+    needs: [branch_build_setup]
+    env:
+      BUILD_TYPE: slim
+      AIO_BASE_TAG: ${{ needs.branch_build_setup.outputs.aio_base_tag }}
+      AIO_IMAGE_TAGS: makeplane/plane-aio:slim-${{ needs.branch_build_setup.outputs.flat_branch_name }}
+      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
+      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
+    steps:
+      - name: Set Docker Tag
+        run: |
+          if [ "${{ github.event_name }}" == "release" ]; then
+            TAG=makeplane/plane-aio:${{env.BUILD_TYPE}}-stable,makeplane/plane-aio:${{env.BUILD_TYPE}}-${{ github.event.release.tag_name }}
+          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
+            TAG=makeplane/plane-aio:${{env.BUILD_TYPE}}-latest
+          else
+            TAG=${{ env.AIO_IMAGE_TAGS }}
+          fi
+          echo "AIO_IMAGE_TAGS=${TAG}" >> $GITHUB_ENV
+
+      - name: Login to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+        with:
+          driver: ${{ env.BUILDX_DRIVER }}
+          version: ${{ env.BUILDX_VERSION }}
+          endpoint: ${{ env.BUILDX_ENDPOINT }}
+
+      - name: Check out the repo
+        uses: actions/checkout@v4
+
+      - name: Build and Push to Docker Hub
+        uses: docker/build-push-action@v5.1.0
+        with:
+          context: .
+          file: ./aio/Dockerfile-app
+          platforms: ${{ env.BUILDX_PLATFORMS }}
+          tags: ${{ env.AIO_IMAGE_TAGS }}
+          push: true
+          build-args: |
+            BASE_TAG=${{ env.AIO_BASE_TAG }}
+            BUILD_TYPE=${{env.BUILD_TYPE}}
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
+
+        env:
+          DOCKER_BUILDKIT: 1
+          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
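Both AIO jobs derive their Docker tags from flat_branch_name, which the setup job computes with sed 's/[^a-zA-Z0-9]/-/g'. The same transform in Python, for reference; the function name is illustrative:

```python
import re

def flatten_branch_name(branch: str) -> str:
    """Replace every non-alphanumeric character with '-', like the sed call above."""
    return re.sub(r"[^a-zA-Z0-9]", "-", branch)

# "feat/editor-v2" -> "feat-editor-v2", so the resulting Docker tag stays valid.
print(flatten_branch_name("feat/editor-v2"))
```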
.github/workflows/build-branch.yml (vendored, 60 changes)

@@ -27,6 +27,7 @@ jobs:
       build_admin: ${{ steps.changed_files.outputs.admin_any_changed }}
       build_space: ${{ steps.changed_files.outputs.space_any_changed }}
       build_web: ${{ steps.changed_files.outputs.web_any_changed }}
+      build_live: ${{ steps.changed_files.outputs.live_any_changed }}

     steps:
       - id: set_env_variables
@@ -79,6 +80,13 @@ jobs:
             - 'yarn.lock'
             - 'tsconfig.json'
             - 'turbo.json'
+          live:
+            - live/**
+            - packages/**
+            - 'package.json'
+            - 'yarn.lock'
+            - 'tsconfig.json'
+            - 'turbo.json'

   branch_build_push_web:
     if: ${{ needs.branch_build_setup.outputs.build_web == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
@@ -288,6 +296,58 @@ jobs:
           DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
           DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

+  branch_build_push_live:
+    if: ${{ needs.branch_build_setup.outputs.build_live == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+    runs-on: ubuntu-20.04
+    needs: [branch_build_setup]
+    env:
+      LIVE_TAG: makeplane/plane-live:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
+      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
+    steps:
+      - name: Set Live Docker Tag
+        run: |
+          if [ "${{ github.event_name }}" == "release" ]; then
+            TAG=makeplane/plane-live:stable,makeplane/plane-live:${{ github.event.release.tag_name }}
+          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
+            TAG=makeplane/plane-live:latest
+          else
+            TAG=${{ env.LIVE_TAG }}
+          fi
+          echo "LIVE_TAG=${TAG}" >> $GITHUB_ENV
+
+      - name: Login to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+        with:
+          driver: ${{ env.BUILDX_DRIVER }}
+          version: ${{ env.BUILDX_VERSION }}
+          endpoint: ${{ env.BUILDX_ENDPOINT }}
+
+      - name: Check out the repo
+        uses: actions/checkout@v4
+
+      - name: Build and Push Live Server to Docker Hub
+        uses: docker/build-push-action@v5.1.0
+        with:
+          context: .
+          file: ./live/Dockerfile.live
+          platforms: ${{ env.BUILDX_PLATFORMS }}
+          tags: ${{ env.LIVE_TAG }}
+          push: true
+        env:
+          DOCKER_BUILDKIT: 1
+          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
   branch_build_push_proxy:
     if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
     runs-on: ubuntu-20.04
.github/workflows/feature-deployment.yml (vendored, 318 changes)

@@ -3,189 +3,108 @@ name: Feature Preview
 on:
   workflow_dispatch:
     inputs:
-      web-build:
-        required: false
-        description: "Build Web"
-        type: boolean
-        default: true
-      space-build:
-        required: false
-        description: "Build Space"
-        type: boolean
-        default: false
-      admin-build:
-        required: false
-        description: "Build Admin"
-        type: boolean
-        default: false
+      base_tag_name:
+        description: 'Base Tag Name'
+        required: false
+        default: 'preview'

 env:
-  BUILD_WEB: ${{ github.event.inputs.web-build }}
-  BUILD_SPACE: ${{ github.event.inputs.space-build }}
-  BUILD_ADMIN: ${{ github.event.inputs.admin-build }}
+  TARGET_BRANCH: ${{ github.ref_name }}

 jobs:
-  setup-feature-build:
-    name: Feature Build Setup
+  branch_build_setup:
+    name: Build Setup
     runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        run: |
-          echo "BUILD_WEB=$BUILD_WEB"
-          echo "BUILD_SPACE=$BUILD_SPACE"
-          echo "BUILD_ADMIN=$BUILD_ADMIN"
     outputs:
-      web-build: ${{ env.BUILD_WEB}}
-      space-build: ${{env.BUILD_SPACE}}
-      admin-build: ${{env.BUILD_ADMIN}}
+      gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
+      flat_branch_name: ${{ steps.set_env_variables.outputs.FLAT_BRANCH_NAME }}
+      gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
+      gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
+      gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
+      gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
+      aio_base_tag: ${{ steps.set_env_variables.outputs.AIO_BASE_TAG }}
+      do_full_build: ${{ steps.set_env_variables.outputs.DO_FULL_BUILD }}
+      do_slim_build: ${{ steps.set_env_variables.outputs.DO_SLIM_BUILD }}

-  feature-build-web:
-    if: ${{ needs.setup-feature-build.outputs.web-build == 'true' }}
-    needs: setup-feature-build
-    name: Feature Build Web
-    runs-on: ubuntu-latest
-    env:
-      AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }}
-      AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }}
-      AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}
-      NEXT_PUBLIC_API_BASE_URL: ${{ vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL }}
-    steps:
-      - name: Set up Node.js
-        uses: actions/setup-node@v4
-        with:
-          node-version: "18"
-      - name: Install AWS cli
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y python3-pip
-          pip3 install awscli
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          path: plane
-      - name: Install Dependencies
-        run: |
-          cd $GITHUB_WORKSPACE/plane
-          yarn install
-      - name: Build Web
-        id: build-web
-        run: |
-          cd $GITHUB_WORKSPACE/plane
-          yarn build --filter=web
-          cd $GITHUB_WORKSPACE
-
-          TAR_NAME="web.tar.gz"
-          tar -czf $TAR_NAME ./plane
-
-          FILE_EXPIRY=$(date -u -d "+2 days" +"%Y-%m-%dT%H:%M:%SZ")
-          aws s3 cp $TAR_NAME s3://${{ env.AWS_BUCKET }}/${{github.sha}}/$TAR_NAME --expires $FILE_EXPIRY
+    steps:
+      - id: set_env_variables
+        name: Set Environment Variables
+        run: |
+          echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
+          echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
+          echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
+          echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
+
+          if [ "${{ github.event.inputs.base_tag_name }}" != "" ]; then
+            echo "AIO_BASE_TAG=${{ github.event.inputs.base_tag_name }}" >> $GITHUB_OUTPUT
+          else
+            echo "AIO_BASE_TAG=develop" >> $GITHUB_OUTPUT
+          fi
+
+          echo "TARGET_BRANCH=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT
+
+          FLAT_BRANCH_NAME=$(echo "${{ env.TARGET_BRANCH }}" | sed 's/[^a-zA-Z0-9]/-/g')
+          echo "FLAT_BRANCH_NAME=$FLAT_BRANCH_NAME" >> $GITHUB_OUTPUT
+
+      - id: checkout_files
+        name: Checkout Files
+        uses: actions/checkout@v4

-  feature-build-space:
-    if: ${{ needs.setup-feature-build.outputs.space-build == 'true' }}
-    needs: setup-feature-build
-    name: Feature Build Space
-    runs-on: ubuntu-latest
-    env:
-      AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }}
-      AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }}
-      AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}
-      NEXT_PUBLIC_SPACE_BASE_PATH: "/spaces"
-      NEXT_PUBLIC_API_BASE_URL: ${{ vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL }}
-    outputs:
-      do-build: ${{ needs.setup-feature-build.outputs.space-build }}
-      s3-url: ${{ steps.build-space.outputs.S3_PRESIGNED_URL }}
-    steps:
-      - name: Set up Node.js
-        uses: actions/setup-node@v4
-        with:
-          node-version: "18"
-      - name: Install AWS cli
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y python3-pip
-          pip3 install awscli
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          path: plane
-      - name: Install Dependencies
-        run: |
-          cd $GITHUB_WORKSPACE/plane
-          yarn install
-      - name: Build Space
-        id: build-space
-        run: |
-          cd $GITHUB_WORKSPACE/plane
-          yarn build --filter=space
-          cd $GITHUB_WORKSPACE
-
-          TAR_NAME="space.tar.gz"
-          tar -czf $TAR_NAME ./plane
-
-          FILE_EXPIRY=$(date -u -d "+2 days" +"%Y-%m-%dT%H:%M:%SZ")
-          aws s3 cp $TAR_NAME s3://${{ env.AWS_BUCKET }}/${{github.sha}}/$TAR_NAME --expires $FILE_EXPIRY
-
-  feature-build-admin:
-    if: ${{ needs.setup-feature-build.outputs.admin-build == 'true' }}
-    needs: setup-feature-build
-    name: Feature Build Admin
-    runs-on: ubuntu-latest
-    env:
-      AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }}
-      AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }}
-      AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}
-      NEXT_PUBLIC_ADMIN_BASE_PATH: "/god-mode"
-      NEXT_PUBLIC_API_BASE_URL: ${{ vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL }}
-    outputs:
-      do-build: ${{ needs.setup-feature-build.outputs.admin-build }}
-      s3-url: ${{ steps.build-admin.outputs.S3_PRESIGNED_URL }}
-    steps:
-      - name: Set up Node.js
-        uses: actions/setup-node@v4
-        with:
-          node-version: "18"
-      - name: Install AWS cli
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y python3-pip
-          pip3 install awscli
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          path: plane
-      - name: Install Dependencies
-        run: |
-          cd $GITHUB_WORKSPACE/plane
-          yarn install
-      - name: Build Admin
-        id: build-admin
-        run: |
-          cd $GITHUB_WORKSPACE/plane
-          yarn build --filter=admin
-          cd $GITHUB_WORKSPACE
-
-          TAR_NAME="admin.tar.gz"
-          tar -czf $TAR_NAME ./plane
-
-          FILE_EXPIRY=$(date -u -d "+2 days" +"%Y-%m-%dT%H:%M:%SZ")
-          aws s3 cp $TAR_NAME s3://${{ env.AWS_BUCKET }}/${{github.sha}}/$TAR_NAME --expires $FILE_EXPIRY
+  full_build_push:
+    runs-on: ubuntu-20.04
+    needs: [branch_build_setup]
+    env:
+      BUILD_TYPE: full
+      AIO_BASE_TAG: ${{ needs.branch_build_setup.outputs.aio_base_tag }}
+      AIO_IMAGE_TAGS: makeplane/plane-aio-feature:${{ needs.branch_build_setup.outputs.flat_branch_name }}
+      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
+    steps:
+      - name: Login to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+        with:
+          driver: ${{ env.BUILDX_DRIVER }}
+          version: ${{ env.BUILDX_VERSION }}
+          endpoint: ${{ env.BUILDX_ENDPOINT }}
+
+      - name: Check out the repo
+        uses: actions/checkout@v4
+
+      - name: Build and Push to Docker Hub
+        uses: docker/build-push-action@v5.1.0
+        with:
+          context: .
+          file: ./aio/Dockerfile-app
+          platforms: ${{ env.BUILDX_PLATFORMS }}
+          tags: ${{ env.AIO_IMAGE_TAGS }}
+          push: true
+          build-args:
+            BUILD_TAG=${{ env.AIO_BASE_TAG }}
+            BUILD_TYPE=${{env.BUILD_TYPE}}
+          # cache-from: type=gha
+          # cache-to: type=gha,mode=max
+        env:
+          DOCKER_BUILDKIT: 1
+          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+    outputs:
+      AIO_IMAGE_TAGS: ${{ env.AIO_IMAGE_TAGS }}

   feature-deploy:
-    if: ${{ always() && (needs.setup-feature-build.outputs.web-build == 'true' || needs.setup-feature-build.outputs.space-build == 'true' || needs.setup-feature-build.outputs.admin-build == 'true') }}
-    needs:
-      [
-        setup-feature-build,
-        feature-build-web,
-        feature-build-space,
-        feature-build-admin,
-      ]
+    needs: [branch_build_setup, full_build_push]
     name: Feature Deploy
     runs-on: ubuntu-latest
     env:
       AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }}
       AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }}
       AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}
       KUBE_CONFIG_FILE: ${{ secrets.FEATURE_PREVIEW_KUBE_CONFIG }}
+      DEPLOYMENT_NAME: ${{ needs.branch_build_setup.outputs.flat_branch_name }}
     steps:
       - name: Install AWS cli
         run: |
@@ -213,54 +132,37 @@ jobs:
           ./get_helm.sh
       - name: App Deploy
         run: |
-          WEB_S3_URL=""
-          if [ ${{ env.BUILD_WEB }} == true ]; then
-            WEB_S3_URL=$(aws s3 presign s3://${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}/${{github.sha}}/web.tar.gz --expires-in 3600)
-          fi
-
-          SPACE_S3_URL=""
-          if [ ${{ env.BUILD_SPACE }} == true ]; then
-            SPACE_S3_URL=$(aws s3 presign s3://${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}/${{github.sha}}/space.tar.gz --expires-in 3600)
-          fi
-
-          ADMIN_S3_URL=""
-          if [ ${{ env.BUILD_ADMIN }} == true ]; then
-            ADMIN_S3_URL=$(aws s3 presign s3://${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}/${{github.sha}}/admin.tar.gz --expires-in 3600)
-          fi
-
-          if [ ${{ env.BUILD_WEB }} == true ] || [ ${{ env.BUILD_SPACE }} == true ] || [ ${{ env.BUILD_ADMIN }} == true ]; then
-
-            helm --kube-insecure-skip-tls-verify repo add feature-preview ${{ vars.FEATURE_PREVIEW_HELM_CHART_URL }}
-
-            APP_NAMESPACE="${{ vars.FEATURE_PREVIEW_NAMESPACE }}"
-            DEPLOY_SCRIPT_URL="${{ vars.FEATURE_PREVIEW_DEPLOY_SCRIPT_URL }}"
-
-            METADATA=$(helm --kube-insecure-skip-tls-verify install feature-preview/${{ vars.FEATURE_PREVIEW_HELM_CHART_NAME }} \
-              --generate-name \
-              --namespace $APP_NAMESPACE \
-              --set ingress.primaryDomain=${{vars.FEATURE_PREVIEW_PRIMARY_DOMAIN || 'feature.plane.tools' }} \
-              --set web.image=${{vars.FEATURE_PREVIEW_DOCKER_BASE}} \
-              --set web.enabled=${{ env.BUILD_WEB || false }} \
-              --set web.artifact_url=$WEB_S3_URL \
-              --set space.image=${{vars.FEATURE_PREVIEW_DOCKER_BASE}} \
-              --set space.enabled=${{ env.BUILD_SPACE || false }} \
-              --set space.artifact_url=$SPACE_S3_URL \
-              --set admin.image=${{vars.FEATURE_PREVIEW_DOCKER_BASE}} \
-              --set admin.enabled=${{ env.BUILD_ADMIN || false }} \
-              --set admin.artifact_url=$ADMIN_S3_URL \
-              --set shared_config.deploy_script_url=$DEPLOY_SCRIPT_URL \
-              --set shared_config.api_base_url=${{vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL}} \
-              --output json \
-              --timeout 1000s)
-
-            APP_NAME=$(echo $METADATA | jq -r '.name')
-
-            INGRESS_HOSTNAME=$(kubectl get ingress -n feature-builds --insecure-skip-tls-verify \
-              -o jsonpath='{.items[?(@.metadata.annotations.meta\.helm\.sh\/release-name=="'$APP_NAME'")]}' | \
-              jq -r '.spec.rules[0].host')
-
-            echo "****************************************"
-            echo "APP NAME ::: $APP_NAME"
-            echo "INGRESS HOSTNAME ::: $INGRESS_HOSTNAME"
-            echo "****************************************"
-          fi
+          helm --kube-insecure-skip-tls-verify repo add feature-preview ${{ vars.FEATURE_PREVIEW_HELM_CHART_URL }}
+
+          APP_NAMESPACE="${{ vars.FEATURE_PREVIEW_NAMESPACE }}"
+
+          helm --kube-insecure-skip-tls-verify uninstall \
+            ${{ env.DEPLOYMENT_NAME }} \
+            --namespace $APP_NAMESPACE \
+            --timeout 10m0s \
+            --wait \
+            --ignore-not-found
+
+          METADATA=$(helm --kube-insecure-skip-tls-verify upgrade \
+            --install=true \
+            --namespace $APP_NAMESPACE \
+            --set dockerhub.loginid=${{ secrets.DOCKERHUB_USERNAME }} \
+            --set dockerhub.password=${{ secrets.DOCKERHUB_TOKEN_RO}} \
+            --set config.feature_branch=${{ env.DEPLOYMENT_NAME }} \
+            --set ingress.primaryDomain=${{vars.FEATURE_PREVIEW_PRIMARY_DOMAIN || 'feature.plane.tools' }} \
+            --set ingress.tls_secret=${{vars.FEATURE_PREVIEW_INGRESS_TLS_SECRET || '' }} \
+            --output json \
+            --timeout 10m0s \
+            --wait \
+            ${{ env.DEPLOYMENT_NAME }} feature-preview/${{ vars.FEATURE_PREVIEW_HELM_CHART_NAME }} )
+
+          APP_NAME=$(echo $METADATA | jq -r '.name')
+
+          INGRESS_HOSTNAME=$(kubectl get ingress -n $APP_NAMESPACE --insecure-skip-tls-verify \
+            -o jsonpath='{.items[?(@.metadata.annotations.meta\.helm\.sh\/release-name=="'$APP_NAME'")]}' | \
+            jq -r '.spec.rules[0].host')
+
+          echo "****************************************"
+          echo "APP NAME ::: $APP_NAME"
+          echo "INGRESS HOSTNAME ::: $INGRESS_HOSTNAME"
+          echo "****************************************"
.idx/dev.nix (new file, 16 lines)

@@ -0,0 +1,16 @@
+{ pkgs, ... }: {
+
+  # Which nixpkgs channel to use.
+  channel = "stable-23.11"; # or "unstable"
+
+  # Use https://search.nixos.org/packages to find packages
+  packages = [
+    pkgs.nodejs_20
+    pkgs.python3
+  ];
+
+  services.docker.enable = true;
+  services.postgres.enable = true;
+  services.redis.enable = true;
+
+}
ENV_SETUP.md (85 changes)

@@ -1,6 +1,5 @@
 # Environment Variables

-
 Environment variables are distributed in various files. Please refer them carefully.

 ## {PROJECT_FOLDER}/.env
@@ -9,17 +8,13 @@ File is available in the project root folder
-
 ```
 # Database Settings
-PGUSER="plane"
-PGPASSWORD="plane"
-PGHOST="plane-db"
-PGDATABASE="plane"
-DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
-
+POSTGRES_USER="plane"
+POSTGRES_PASSWORD="plane"
+POSTGRES_DB="plane"
+PGDATA="/var/lib/postgresql/data"
 # Redis Settings
 REDIS_HOST="plane-redis"
 REDIS_PORT="6379"
-REDIS_URL="redis://${REDIS_HOST}:6379/"

 # AWS Settings
 AWS_REGION=""
 AWS_ACCESS_KEY_ID="access-key"
@@ -29,63 +24,39 @@ AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
 AWS_S3_BUCKET_NAME="uploads"
 # Maximum file upload limit
 FILE_SIZE_LIMIT=5242880
-
-# GPT settings
-OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
-OPENAI_API_KEY="sk-" # deprecated
-GPT_ENGINE="gpt-3.5-turbo" # deprecated
-
-# Settings related to Docker
-DOCKERIZED=1 # deprecated
+# set to 1 If using the pre-configured minio setup
 USE_MINIO=1

 # Nginx Configuration
 NGINX_PORT=80
 ```

-## {PROJECT_FOLDER}/web/.env.example
-
-```
-# Public boards deploy URL
-NEXT_PUBLIC_DEPLOY_URL="http://localhost/spaces"
-```
-
 ## {PROJECT_FOLDER}/apiserver/.env

 ```
 # Backend
 # Debug value for api server use it as 0 for production use
 DEBUG=0
+CORS_ALLOWED_ORIGINS="http://localhost"
 # Error logs
 SENTRY_DSN=""
+SENTRY_ENVIRONMENT="development"
 # Database Settings
-PGUSER="plane"
-PGPASSWORD="plane"
-PGHOST="plane-db"
-PGDATABASE="plane"
-DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
-
+POSTGRES_USER="plane"
+POSTGRES_PASSWORD="plane"
+POSTGRES_HOST="plane-db"
+POSTGRES_DB="plane"
+POSTGRES_PORT=5432
+DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
 # Redis Settings
 REDIS_HOST="plane-redis"
 REDIS_PORT="6379"
-REDIS_URL="redis://${REDIS_HOST}:6379/"
-
-# Email Settings
-EMAIL_HOST=""
-EMAIL_HOST_USER=""
-EMAIL_HOST_PASSWORD=""
-EMAIL_PORT=587
-EMAIL_FROM="Team Plane <team@mailer.plane.so>"
-EMAIL_USE_TLS="1"
-EMAIL_USE_SSL="0"

 # AWS Settings
 AWS_REGION=""
 AWS_ACCESS_KEY_ID="access-key"
@@ -95,35 +66,25 @@ AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
 AWS_S3_BUCKET_NAME="uploads"
 # Maximum file upload limit
 FILE_SIZE_LIMIT=5242880

-# GPT settings
-OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
-OPENAI_API_KEY="sk-" # deprecated
-GPT_ENGINE="gpt-3.5-turbo" # deprecated
-
-# Settings related to Docker
-DOCKERIZED=1 # Deprecated
-
-# Github
-GITHUB_CLIENT_SECRET="" # For fetching release notes
-
+DOCKERIZED=1 # deprecated
+# set to 1 If using the pre-configured minio setup
 USE_MINIO=1

 # Nginx Configuration
 NGINX_PORT=80

 # SignUps
 ENABLE_SIGNUP="1"

-# Email Redirection URL
+# Email redirections and minio domain settings
 WEB_URL="http://localhost"
 # Gunicorn Workers
 GUNICORN_WORKERS=2
+# Base URLs
+ADMIN_BASE_URL=
+SPACE_BASE_URL=
+APP_BASE_URL=
+SECRET_KEY="gxoytl7dmnc1y37zahah820z5iq3iozu38cnfjtu3yaau9cd9z"
 ```

 ## Updates

 - The environment variable NEXT_PUBLIC_API_BASE_URL has been removed from both the web and space projects.
 - The naming convention for containers and images has been updated.
 - The plane-worker image will no longer be maintained, as it has been merged with plane-backend.
 - The Tiptap pro-extension dependency has been removed, eliminating the need for Tiptap API keys.
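The updated apiserver block now assembles DATABASE_URL from the individual POSTGRES_* values. A sketch of the equivalent composition in Python, mirroring the ${VAR} interpolation shown above; the helper and its fallback defaults are illustrative, not part of the file:

```python
import os

def database_url() -> str:
    """Build the DATABASE_URL shown in the apiserver .env from its parts."""
    user = os.environ.get("POSTGRES_USER", "plane")
    password = os.environ.get("POSTGRES_PASSWORD", "plane")
    host = os.environ.get("POSTGRES_HOST", "plane-db")
    port = os.environ.get("POSTGRES_PORT", "5432")
    db = os.environ.get("POSTGRES_DB", "plane")
    return f"postgresql://{user}:{password}@{host}:{port}/{db}"

# With the example values: postgresql://plane:plane@plane-db:5432/plane
```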
@@ -3,7 +3,7 @@ import { Metadata } from "next";
|
||||
import { AdminLayout } from "@/layouts/admin-layout";
|
||||
|
||||
export const metadata: Metadata = {
|
||||
title: "AI Settings - God Mode",
|
||||
title: "Artificial Intelligence Settings - Plane Web",
|
||||
};
|
||||
|
||||
export default function AILayout({ children }: { children: ReactNode }) {
|
||||
|
||||
@@ -3,8 +3,6 @@
|
||||
import { observer } from "mobx-react";
|
||||
import useSWR from "swr";
|
||||
import { Loader } from "@plane/ui";
|
||||
// components
|
||||
import { PageHeader } from "@/components/common";
|
||||
// hooks
|
||||
import { useInstance } from "@/hooks/store";
|
||||
// components
|
||||
@@ -18,7 +16,6 @@ const InstanceAIPage = observer(() => {
|
||||
|
||||
return (
|
||||
<>
|
||||
<PageHeader title="Artificial Intelligence - God Mode" />
|
||||
<div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
|
||||
<div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
|
||||
<div className="text-xl font-medium text-custom-text-100">AI features for all your workspaces</div>
|
||||
|
||||
@@ -10,6 +10,7 @@ import { IFormattedInstanceConfiguration, TInstanceGithubAuthenticationConfigura
|
||||
import { Button, TOAST_TYPE, getButtonStyling, setToast } from "@plane/ui";
|
||||
// components
|
||||
import {
|
||||
CodeBlock,
|
||||
ConfirmDiscardModal,
|
||||
ControllerInput,
|
||||
CopyField,
|
||||
@@ -102,7 +103,8 @@ export const InstanceGithubConfigForm: FC<Props> = (props) => {
|
||||
url: originURL,
|
||||
description: (
|
||||
<>
|
||||
We will auto-generate this. Paste this into the Authorized origin URL field{" "}
|
||||
We will auto-generate this. Paste this into the{" "}
|
||||
<CodeBlock darkerShade>Authorized origin URL</CodeBlock> field{" "}
|
||||
<a
|
||||
tabIndex={-1}
|
||||
href="https://github.com/settings/applications/new"
|
||||
@@ -121,7 +123,8 @@ export const InstanceGithubConfigForm: FC<Props> = (props) => {
|
||||
url: `${originURL}/auth/github/callback/`,
|
||||
description: (
|
||||
<>
|
||||
We will auto-generate this. Paste this into your Authorized Callback URI field{" "}
|
||||
We will auto-generate this. Paste this into your{" "}
|
||||
<CodeBlock darkerShade>Authorized Callback URI</CodeBlock> field{" "}
|
||||
<a
|
||||
tabIndex={-1}
|
||||
href="https://github.com/settings/applications/new"
|
||||
@@ -143,8 +146,8 @@ export const InstanceGithubConfigForm: FC<Props> = (props) => {
|
||||
.then((response = []) => {
|
||||
setToast({
|
||||
type: TOAST_TYPE.SUCCESS,
|
||||
title: "Success",
|
||||
message: "Github Configuration Settings updated successfully",
|
||||
title: "Done!",
|
||||
message: "Your GitHub authentication is configured. You should test it now.",
|
||||
});
|
||||
reset({
|
||||
GITHUB_CLIENT_ID: response.find((item) => item.key === "GITHUB_CLIENT_ID")?.value,
|
||||
@@ -170,8 +173,8 @@ export const InstanceGithubConfigForm: FC<Props> = (props) => {
|
||||
/>
|
||||
<div className="flex flex-col gap-8">
|
||||
<div className="grid grid-cols-2 gap-x-12 gap-y-8 w-full">
|
||||
<div className="flex flex-col gap-y-4 col-span-2 md:col-span-1">
|
||||
<div className="pt-2 text-xl font-medium">Configuration</div>
|
||||
<div className="flex flex-col gap-y-4 col-span-2 md:col-span-1 pt-1">
|
||||
<div className="pt-2.5 text-xl font-medium">GitHub-provided details for Plane</div>
|
||||
{GITHUB_FORM_FIELDS.map((field) => (
|
||||
<ControllerInput
|
||||
key={field.key}
|
||||
@@ -201,8 +204,8 @@ export const InstanceGithubConfigForm: FC<Props> = (props) => {
|
||||
</div>
|
||||
</div>
|
||||
<div className="col-span-2 md:col-span-1">
|
||||
<div className="flex flex-col gap-y-4 px-6 py-4 my-2 bg-custom-background-80/60 rounded-lg">
|
||||
<div className="pt-2 text-xl font-medium">Service provider details</div>
|
||||
<div className="flex flex-col gap-y-4 px-6 pt-1.5 pb-4 bg-custom-background-80/60 rounded-lg">
|
||||
<div className="pt-2 text-xl font-medium">Plane-provided details for GitHub</div>
|
||||
{GITHUB_SERVICE_FIELD.map((field) => (
|
||||
<CopyField key={field.key} label={field.label} url={field.url} description={field.description} />
|
||||
))}
|
||||
|
||||
@@ -63,7 +63,7 @@ const InstanceGithubAuthenticationPage = observer(() => {
|
||||
};
|
||||
return (
|
||||
<>
|
||||
<PageHeader title="Authentication - God Mode" />
|
||||
<PageHeader title="GitHub Authentication - Plane Web" />
|
||||
<div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
|
||||
<div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
|
||||
<AuthenticationMethodCard
|
||||
@@ -93,7 +93,7 @@ const InstanceGithubAuthenticationPage = observer(() => {
|
||||
withBorder={false}
|
||||
/>
|
||||
</div>
|
||||
<div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md p-4">
|
||||
<div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
|
||||
{formattedConfig ? (
|
||||
<InstanceGithubConfigForm config={formattedConfig} />
|
||||
) : (
|
||||
|
||||
@@ -8,6 +8,7 @@ import { IFormattedInstanceConfiguration, TInstanceGitlabAuthenticationConfigura
|
||||
import { Button, TOAST_TYPE, getButtonStyling, setToast } from "@plane/ui";
|
||||
// components
|
||||
import {
|
||||
CodeBlock,
|
||||
ConfirmDiscardModal,
|
||||
ControllerInput,
|
||||
CopyField,
|
||||
@@ -54,7 +55,7 @@ export const InstanceGitlabConfigForm: FC<Props> = (props) => {
|
||||
label: "Host",
|
||||
description: (
|
||||
<>
|
||||
This is the <b>GitLab host</b> to use for login, <b>including scheme</b>.
|
||||
This is either https://gitlab.com or the <CodeBlock>domain.tld</CodeBlock> where you host GitLab.
|
||||
</>
|
||||
),
|
||||
placeholder: "https://gitlab.com",
|
||||
@@ -116,7 +117,8 @@ export const InstanceGitlabConfigForm: FC<Props> = (props) => {
|
||||
url: `${originURL}/auth/gitlab/callback/`,
|
||||
description: (
|
||||
<>
|
||||
We will auto-generate this. Paste this into the <b>Redirect URI</b> field of your{" "}
|
||||
We will auto-generate this. Paste this into the{" "}
|
||||
<CodeBlock darkerShade>Redirect URI</CodeBlock> field of your{" "}
|
||||
<a
|
||||
tabIndex={-1}
|
||||
href="https://docs.gitlab.com/ee/integration/oauth_provider.html"
|
||||
@@ -139,8 +141,8 @@ export const InstanceGitlabConfigForm: FC<Props> = (props) => {
|
||||
.then((response = []) => {
|
||||
setToast({
|
||||
type: TOAST_TYPE.SUCCESS,
|
||||
title: "Success",
|
||||
message: "GitLab Configuration Settings updated successfully",
|
||||
title: "Done!",
|
||||
message: "Your GitLab authentication is configured. You should test it now.",
|
||||
});
|
||||
reset({
|
||||
GITLAB_HOST: response.find((item) => item.key === "GITLAB_HOST")?.value,
|
||||
@@ -167,8 +169,8 @@ export const InstanceGitlabConfigForm: FC<Props> = (props) => {
|
||||
/>
|
||||
<div className="flex flex-col gap-8">
|
||||
<div className="grid grid-cols-2 gap-x-12 gap-y-8 w-full">
|
||||
<div className="flex flex-col gap-y-4 col-span-2 md:col-span-1">
|
||||
<div className="pt-2 text-xl font-medium">Configuration</div>
|
||||
<div className="flex flex-col gap-y-4 col-span-2 md:col-span-1 pt-1">
|
||||
<div className="pt-2.5 text-xl font-medium">GitLab-provided details for Plane</div>
|
||||
{GITLAB_FORM_FIELDS.map((field) => (
|
||||
<ControllerInput
|
||||
key={field.key}
|
||||
@@ -198,8 +200,8 @@ export const InstanceGitlabConfigForm: FC<Props> = (props) => {
|
||||
</div>
|
||||
</div>
|
||||
<div className="col-span-2 md:col-span-1">
|
||||
<div className="flex flex-col gap-y-4 px-6 py-4 my-2 bg-custom-background-80/60 rounded-lg">
|
||||
<div className="pt-2 text-xl font-medium">Service provider details</div>
|
||||
<div className="flex flex-col gap-y-4 px-6 pt-1.5 pb-4 bg-custom-background-80/60 rounded-lg">
|
||||
<div className="pt-2 text-xl font-medium">Plane-provided details for GitLab</div>
|
||||
{GITLAB_SERVICE_FIELD.map((field) => (
|
||||
<CopyField key={field.key} label={field.label} url={field.url} description={field.description} />
|
||||
))}
|
||||
|
||||
@@ -57,7 +57,7 @@ const InstanceGitlabAuthenticationPage = observer(() => {
};
return (
<>
-<PageHeader title="Authentication - God Mode" />
+<PageHeader title="GitLab Authentication - Plane Web" />
<div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
<div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
<AuthenticationMethodCard
@@ -80,7 +80,7 @@ const InstanceGitlabAuthenticationPage = observer(() => {
withBorder={false}
/>
</div>
-<div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md p-4">
+<div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
{formattedConfig ? (
<InstanceGitlabConfigForm config={formattedConfig} />
) : (

@@ -9,6 +9,7 @@ import { IFormattedInstanceConfiguration, TInstanceGoogleAuthenticationConfigura
import { Button, TOAST_TYPE, getButtonStyling, setToast } from "@plane/ui";
// components
import {
+CodeBlock,
ConfirmDiscardModal,
ControllerInput,
CopyField,
@@ -101,7 +102,8 @@ export const InstanceGoogleConfigForm: FC<Props> = (props) => {
url: originURL,
description: (
<p>
-We will auto-generate this. Paste this into your Authorized JavaScript origins field. For this OAuth client{" "}
+We will auto-generate this. Paste this into your{" "}
+<CodeBlock darkerShade>Authorized JavaScript origins</CodeBlock> field. For this OAuth client{" "}
<a
href="https://console.cloud.google.com/apis/credentials/oauthclient"
target="_blank"
@@ -119,7 +121,8 @@ export const InstanceGoogleConfigForm: FC<Props> = (props) => {
url: `${originURL}/auth/google/callback/`,
description: (
<p>
-We will auto-generate this. Paste this into your Authorized Redirect URI field. For this OAuth client{" "}
+We will auto-generate this. Paste this into your <CodeBlock darkerShade>Authorized Redirect URI</CodeBlock>{" "}
+field. For this OAuth client{" "}
<a
href="https://console.cloud.google.com/apis/credentials/oauthclient"
target="_blank"
@@ -140,8 +143,8 @@ export const InstanceGoogleConfigForm: FC<Props> = (props) => {
.then((response = []) => {
setToast({
type: TOAST_TYPE.SUCCESS,
-title: "Success",
-message: "Google Configuration Settings updated successfully",
+title: "Done!",
+message: "Your Google authentication is configured. You should test it now.",
});
reset({
GOOGLE_CLIENT_ID: response.find((item) => item.key === "GOOGLE_CLIENT_ID")?.value,
@@ -167,8 +170,8 @@ export const InstanceGoogleConfigForm: FC<Props> = (props) => {
/>
<div className="flex flex-col gap-8">
<div className="grid grid-cols-2 gap-x-12 gap-y-8 w-full">
-<div className="flex flex-col gap-y-4 col-span-2 md:col-span-1">
-<div className="pt-2 text-xl font-medium">Configuration</div>
+<div className="flex flex-col gap-y-4 col-span-2 md:col-span-1 pt-1">
+<div className="pt-2.5 text-xl font-medium">Google-provided details for Plane</div>
{GOOGLE_FORM_FIELDS.map((field) => (
<ControllerInput
key={field.key}
@@ -198,8 +201,8 @@ export const InstanceGoogleConfigForm: FC<Props> = (props) => {
</div>
</div>
<div className="col-span-2 md:col-span-1">
-<div className="flex flex-col gap-y-4 px-6 py-4 my-2 bg-custom-background-80/60 rounded-lg">
-<div className="pt-2 text-xl font-medium">Service provider details</div>
+<div className="flex flex-col gap-y-4 px-6 pt-1.5 pb-4 bg-custom-background-80/60 rounded-lg">
+<div className="pt-2 text-xl font-medium">Plane-provided details for Google</div>
{GOOGLE_SERVICE_DETAILS.map((field) => (
<CopyField key={field.key} label={field.label} url={field.url} description={field.description} />
))}

@@ -57,7 +57,7 @@ const InstanceGoogleAuthenticationPage = observer(() => {
};
return (
<>
-<PageHeader title="Authentication - God Mode" />
+<PageHeader title="Google Authentication - Plane Web" />
<div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
<div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
<AuthenticationMethodCard
@@ -81,7 +81,7 @@ const InstanceGoogleAuthenticationPage = observer(() => {
withBorder={false}
/>
</div>
-<div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md p-4">
+<div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
{formattedConfig ? (
<InstanceGoogleConfigForm config={formattedConfig} />
) : (

@@ -3,7 +3,7 @@ import { Metadata } from "next";
import { AdminLayout } from "@/layouts/admin-layout";

export const metadata: Metadata = {
-title: "Authentication Settings - God Mode",
+title: "Authentication Settings - Plane Web",
};

export default function AuthenticationLayout({ children }: { children: ReactNode }) {

@@ -5,7 +5,6 @@ import { observer } from "mobx-react";
import useSWR from "swr";
import { TInstanceConfigurationKeys } from "@plane/types";
import { Loader, ToggleSwitch, setPromiseToast } from "@plane/ui";
-import { PageHeader } from "@/components/common";
// helpers
import { cn } from "@/helpers/common.helper";
// hooks
@@ -34,7 +33,7 @@ const InstanceAuthenticationPage = observer(() => {
const updateConfigPromise = updateInstanceConfigurations(payload);

setPromiseToast(updateConfigPromise, {
-loading: "Saving Configuration...",
+loading: "Saving configuration",
success: {
title: "Success",
message: () => "Configuration saved successfully",
@@ -57,10 +56,9 @@ const InstanceAuthenticationPage = observer(() => {

return (
<>
-<PageHeader title="Authentication - God Mode" />
<div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
<div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
-<div className="text-xl font-medium text-custom-text-100">Manage authentication for your instance</div>
+<div className="text-xl font-medium text-custom-text-100">Manage authentication modes for your instance</div>
<div className="text-sm font-normal text-custom-text-300">
Configure authentication modes for your team and restrict sign ups to be invite only.
</div>
@@ -68,15 +66,12 @@ const InstanceAuthenticationPage = observer(() => {
<div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
{formattedConfig ? (
<div className="space-y-3">
-<div className="text-lg font-medium pb-1">Sign-up configuration</div>
<div className={cn("w-full flex items-center gap-14 rounded")}>
<div className="flex grow items-center gap-4">
<div className="grow">
-<div className={cn("font-medium leading-5 text-custom-text-100 text-sm")}>
-Allow anyone to sign up without invite
-</div>
+<div className="text-lg font-medium pb-1">Allow anyone to sign up even without an invite</div>
<div className={cn("font-normal leading-5 text-custom-text-300 text-xs")}>
-Toggling this off will disable self sign ups.
+Toggling this off will only let users sign up when they are invited.
</div>
</div>
</div>

@@ -7,7 +7,7 @@ interface EmailLayoutProps {
}

export const metadata: Metadata = {
-title: "Email Settings - God Mode",
+title: "Email Settings - Plane Web",
};

export default function EmailLayout({ children }: EmailLayoutProps) {

@@ -3,8 +3,6 @@
import { observer } from "mobx-react";
-import useSWR from "swr";
-import { Loader } from "@plane/ui";
// components
import { PageHeader } from "@/components/common";
// hooks
import { useInstance } from "@/hooks/store";
// components
@@ -18,7 +16,6 @@ const InstanceEmailPage = observer(() => {

return (
<>
-<PageHeader title="Email - God Mode" />
<div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
<div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
<div className="text-xl font-medium text-custom-text-100">Secure emails from your own instance</div>

@@ -9,8 +9,9 @@ import { IInstance, IInstanceAdmin } from "@plane/types";
import { Button, Input, TOAST_TYPE, ToggleSwitch, setToast } from "@plane/ui";
// components
import { ControllerInput } from "@/components/common";
// hooks
import { useInstance } from "@/hooks/store";
+import { IntercomConfig } from "./intercom";
// hooks

export interface IGeneralConfigurationForm {
instance: IInstance;
@@ -20,11 +21,13 @@ export interface IGeneralConfigurationForm {
export const GeneralConfigurationForm: FC<IGeneralConfigurationForm> = observer((props) => {
const { instance, instanceAdmins } = props;
// hooks
-const { updateInstanceInfo } = useInstance();
+const { instanceConfigurations, updateInstanceInfo, updateInstanceConfigurations } = useInstance();

// form data
const {
handleSubmit,
control,
+watch,
formState: { errors, isSubmitting },
} = useForm<Partial<IInstance>>({
defaultValues: {
@@ -36,7 +39,16 @@ export const GeneralConfigurationForm: FC<IGeneralConfigurationForm> = observer(
const onSubmit = async (formData: Partial<IInstance>) => {
const payload: Partial<IInstance> = { ...formData };

console.log("payload", payload);
+// update the intercom configuration
+const isIntercomEnabled =
+instanceConfigurations?.find((config) => config.key === "IS_INTERCOM_ENABLED")?.value === "1";
+if (!payload.is_telemetry_enabled && isIntercomEnabled) {
+try {
+await updateInstanceConfigurations({ IS_INTERCOM_ENABLED: "0" });
+} catch (error) {
+console.error(error);
+}
+}

await updateInstanceInfo(payload)
.then(() =>
@@ -74,6 +86,7 @@ export const GeneralConfigurationForm: FC<IGeneralConfigurationForm> = observer(
value={instanceAdmins[0]?.user_detail?.email ?? ""}
placeholder="Admin email"
className="w-full cursor-not-allowed !text-custom-text-400"
+autoComplete="on"
disabled
/>
</div>
@@ -93,7 +106,8 @@ export const GeneralConfigurationForm: FC<IGeneralConfigurationForm> = observer(
</div>

<div className="space-y-3">
-<div className="text-lg font-medium">Telemetry</div>
+<div className="text-lg font-medium">Chat + telemetry</div>
+<IntercomConfig isTelemetryEnabled={watch("is_telemetry_enabled") ?? false} />
<div className="flex items-center gap-14 px-4 py-3 border border-custom-border-200 rounded">
<div className="grow flex items-center gap-4">
<div className="shrink-0">

admin/app/general/intercom.tsx (new file, 82 lines)
@@ -0,0 +1,82 @@
"use client";

import { FC, useState } from "react";
import { observer } from "mobx-react";
import useSWR from "swr";
import { MessageSquare } from "lucide-react";
import { IFormattedInstanceConfiguration } from "@plane/types";
import { ToggleSwitch } from "@plane/ui";
// hooks
import { useInstance } from "@/hooks/store";

type TIntercomConfig = {
isTelemetryEnabled: boolean;
};

export const IntercomConfig: FC<TIntercomConfig> = observer((props) => {
const { isTelemetryEnabled } = props;
// hooks
const { instanceConfigurations, updateInstanceConfigurations, fetchInstanceConfigurations } = useInstance();
// states
const [isSubmitting, setIsSubmitting] = useState<boolean>(false);

// derived values
const isIntercomEnabled = isTelemetryEnabled
? instanceConfigurations
? instanceConfigurations?.find((config) => config.key === "IS_INTERCOM_ENABLED")?.value === "1"
? true
: false
: undefined
: false;

const { isLoading } = useSWR(isTelemetryEnabled ? "INSTANCE_CONFIGURATIONS" : null, () =>
isTelemetryEnabled ? fetchInstanceConfigurations() : null
);

const initialLoader = isLoading && isIntercomEnabled === undefined;

const submitInstanceConfigurations = async (payload: Partial<IFormattedInstanceConfiguration>) => {
try {
await updateInstanceConfigurations(payload);
} catch (error) {
console.error(error);
} finally {
setIsSubmitting(false);
}
};

const enableIntercomConfig = () => {
submitInstanceConfigurations({ IS_INTERCOM_ENABLED: isIntercomEnabled ? "0" : "1" });
};

return (
<>
<div className="flex items-center gap-14 px-4 py-3 border border-custom-border-200 rounded">
<div className="grow flex items-center gap-4">
<div className="shrink-0">
<div className="flex items-center justify-center w-10 h-10 bg-custom-background-80 rounded-full">
<MessageSquare className="w-6 h-6 text-custom-text-300/80 p-0.5" />
</div>
</div>

<div className="grow">
<div className="text-sm font-medium text-custom-text-100 leading-5">Talk to Plane</div>
<div className="text-xs font-normal text-custom-text-300 leading-5">
Let your members chat with us via Intercom or another service. Toggling Telemetry off turns this off
automatically.
</div>
</div>

<div className="ml-auto">
<ToggleSwitch
value={isIntercomEnabled ? true : false}
onChange={enableIntercomConfig}
size="sm"
disabled={!isTelemetryEnabled || isSubmitting || initialLoader}
/>
</div>
</div>
</div>
</>
);
});

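The nested ternary above encodes a tri-state: false when telemetry is off, undefined while configurations are still loading, and the flag's value otherwise. A minimal TypeScript sketch of an equivalent, flatter derivation (the helper name is illustrative, not part of this diff):

// Sketch: the same tri-state, written as early returns.
const deriveIntercomEnabled = (
  isTelemetryEnabled: boolean,
  instanceConfigurations?: { key: string; value?: string }[]
): boolean | undefined => {
  // Chat is forced off whenever telemetry is disabled.
  if (!isTelemetryEnabled) return false;
  // undefined signals "still loading" so the toggle can stay disabled.
  if (!instanceConfigurations) return undefined;
  return instanceConfigurations.find((c) => c.key === "IS_INTERCOM_ENABLED")?.value === "1";
};
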
@@ -3,7 +3,7 @@ import { Metadata } from "next";
import { AdminLayout } from "@/layouts/admin-layout";

export const metadata: Metadata = {
-title: "General Settings - God Mode",
+title: "General Settings - Plane Web",
};

export default function GeneralLayout({ children }: { children: ReactNode }) {

@@ -7,7 +7,7 @@ import { GeneralConfigurationForm } from "./form";

function GeneralPage() {
const { instance, instanceAdmins } = useInstance();
console.log("instance", instance);

return (
<>
<div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">

@@ -7,7 +7,7 @@ interface ImageLayoutProps {
}

export const metadata: Metadata = {
-title: "Images Settings - God Mode",
+title: "Images Settings - Plane Web",
};

export default function ImageLayout({ children }: ImageLayoutProps) {

@@ -3,8 +3,6 @@
import { observer } from "mobx-react";
-import useSWR from "swr";
-import { Loader } from "@plane/ui";
// components
import { PageHeader } from "@/components/common";
// hooks
import { useInstance } from "@/hooks/store";
// local
@@ -18,7 +16,6 @@ const InstanceImagePage = observer(() => {

return (
<>
-<PageHeader title="Image - God Mode" />
<div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
<div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
<div className="text-xl font-medium text-custom-text-100">Third-party image libraries</div>

@@ -1,84 +1,48 @@
import { observer } from "mobx-react";
import Image from "next/image";
import { useTheme } from "next-themes";
-import { KeyRound, Mails } from "lucide-react";
// types
-import { TInstanceAuthenticationMethodKeys, TInstanceAuthenticationModes } from "@plane/types";
-// components
import {
-AuthenticationMethodCard,
-EmailCodesConfiguration,
-GithubConfiguration,
-GitlabConfiguration,
-GoogleConfiguration,
-PasswordLoginConfiguration,
-} from "@/components/authentication";
+TGetBaseAuthenticationModeProps,
+TInstanceAuthenticationMethodKeys,
+TInstanceAuthenticationModes,
+} from "@plane/types";
+// components
+import { AuthenticationMethodCard } from "@/components/authentication";
// helpers
-import { resolveGeneralTheme } from "@/helpers/common.helper";
+import { UpgradeButton } from "@/components/common/upgrade-button";
+import { getBaseAuthenticationModes } from "@/helpers/authentication.helper";
// images
-import githubLightModeImage from "@/public/logos/github-black.png";
-import githubDarkModeImage from "@/public/logos/github-white.png";
-import GitlabLogo from "@/public/logos/gitlab-logo.svg";
-import GoogleLogo from "@/public/logos/google-logo.svg";
+import OIDCLogo from "@/public/logos/oidc-logo.svg";
+import SAMLLogo from "@/public/logos/saml-logo.svg";

export type TAuthenticationModeProps = {
disabled: boolean;
updateConfig: (key: TInstanceAuthenticationMethodKeys, value: string) => void;
};

-export type TGetAuthenticationModeProps = {
-disabled: boolean;
-updateConfig: (key: TInstanceAuthenticationMethodKeys, value: string) => void;
-resolvedTheme: string | undefined;
-};

// Authentication methods
-export const getAuthenticationModes: (props: TGetAuthenticationModeProps) => TInstanceAuthenticationModes[] = ({
+export const getAuthenticationModes: (props: TGetBaseAuthenticationModeProps) => TInstanceAuthenticationModes[] = ({
disabled,
updateConfig,
resolvedTheme,
}) => [
+...getBaseAuthenticationModes({ disabled, updateConfig, resolvedTheme }),
{
-key: "email-codes",
-name: "Email codes",
-description: "Login or sign up using codes sent via emails. You need to have email setup here and enabled.",
-icon: <Mails className="h-6 w-6 p-0.5 text-custom-text-300/80" />,
-config: <EmailCodesConfiguration disabled={disabled} updateConfig={updateConfig} />,
+key: "oidc",
+name: "OIDC",
+description: "Authenticate your users via the OpenID Connect protocol.",
+icon: <Image src={OIDCLogo} height={22} width={22} alt="OIDC Logo" />,
+config: <UpgradeButton />,
+unavailable: true,
},
{
-key: "password-login",
-name: "Password based login",
-description: "Allow members to create accounts with passwords for emails to sign in.",
-icon: <KeyRound className="h-6 w-6 p-0.5 text-custom-text-300/80" />,
-config: <PasswordLoginConfiguration disabled={disabled} updateConfig={updateConfig} />,
-},
-{
-key: "google",
-name: "Google",
-description: "Allow members to login or sign up to plane with their Google accounts.",
-icon: <Image src={GoogleLogo} height={20} width={20} alt="Google Logo" />,
-config: <GoogleConfiguration disabled={disabled} updateConfig={updateConfig} />,
-},
-{
-key: "github",
-name: "Github",
-description: "Allow members to login or sign up to plane with their Github accounts.",
-icon: (
-<Image
-src={resolveGeneralTheme(resolvedTheme) === "dark" ? githubDarkModeImage : githubLightModeImage}
-height={20}
-width={20}
-alt="GitHub Logo"
-/>
-),
-config: <GithubConfiguration disabled={disabled} updateConfig={updateConfig} />,
-},
-{
-key: "gitlab",
-name: "GitLab",
-description: "Allow members to login or sign up to plane with their GitLab accounts.",
-icon: <Image src={GitlabLogo} height={20} width={20} alt="GitLab Logo" />,
-config: <GitlabConfiguration disabled={disabled} updateConfig={updateConfig} />,
+key: "saml",
+name: "SAML",
+description: "Authenticate your users via the Security Assertion Markup Language protocol.",
+icon: <Image src={SAMLLogo} height={22} width={22} alt="SAML Logo" className="pl-0.5" />,
+config: <UpgradeButton />,
+unavailable: true,
},
];

@@ -97,6 +61,7 @@ export const AuthenticationModes: React.FC<TAuthenticationModeProps> = observer(
icon={method.icon}
config={method.config}
disabled={disabled}
+unavailable={method.unavailable}
/>
))}
</>

@@ -96,7 +96,7 @@ export const HelpSection: FC = observer(() => {
leaveTo="transform opacity-0 scale-95"
>
<div
-className={`absolute bottom-2 min-w-[10rem] ${
+className={`absolute bottom-2 min-w-[10rem] z-[15] ${
isSidebarCollapsed ? "left-full" : "-left-[75px]"
} divide-y divide-custom-border-200 whitespace-nowrap rounded bg-custom-background-100 p-1 shadow-custom-shadow-xs`}
ref={helpOptionsRef}

@@ -8,7 +8,7 @@ import { useTheme } from "@/hooks/store";
import useOutsideClickDetector from "@/hooks/use-outside-click-detector";
// components

-export interface IInstanceSidebar { }
+export interface IInstanceSidebar {}

export const InstanceSidebar: FC<IInstanceSidebar> = observer(() => {
// store
@@ -41,10 +41,10 @@ export const InstanceSidebar: FC<IInstanceSidebar> = observer(() => {
<div
className={`inset-y-0 z-20 flex h-full flex-shrink-0 flex-grow-0 flex-col border-r border-custom-sidebar-border-200 bg-custom-sidebar-background-100 duration-300
fixed md:relative
-${isSidebarCollapsed ? "-ml-[250px]" : ""}
-sm:${isSidebarCollapsed ? "-ml-[250px]" : ""}
-md:ml-0 ${isSidebarCollapsed ? "w-[70px]" : "w-[250px]"}
-lg:ml-0 ${isSidebarCollapsed ? "w-[70px]" : "w-[250px]"}
+${isSidebarCollapsed ? "-ml-[290px]" : ""}
+sm:${isSidebarCollapsed ? "-ml-[290px]" : ""}
+md:ml-0 ${isSidebarCollapsed ? "w-[70px]" : "w-[290px]"}
+lg:ml-0 ${isSidebarCollapsed ? "w-[70px]" : "w-[290px]"}
`}
>
<div ref={ref} className="flex h-full w-full flex-1 flex-col">

admin/core/components/authentication/auth-banner.tsx (new file, 29 lines)
@@ -0,0 +1,29 @@
import { FC } from "react";
import { Info, X } from "lucide-react";
// helpers
import { TAuthErrorInfo } from "@/helpers/authentication.helper";

type TAuthBanner = {
bannerData: TAuthErrorInfo | undefined;
handleBannerData?: (bannerData: TAuthErrorInfo | undefined) => void;
};

export const AuthBanner: FC<TAuthBanner> = (props) => {
const { bannerData, handleBannerData } = props;

if (!bannerData) return <></>;
return (
<div className="relative flex items-center p-2 rounded-md gap-2 border border-custom-primary-100/50 bg-custom-primary-100/10">
<div className="w-4 h-4 flex-shrink-0 relative flex justify-center items-center">
<Info size={16} className="text-custom-primary-100" />
</div>
<div className="w-full text-sm font-medium text-custom-primary-100">{bannerData?.message}</div>
<div
className="relative ml-auto w-6 h-6 rounded-sm flex justify-center items-center transition-all cursor-pointer hover:bg-custom-primary-100/20 text-custom-primary-100/80"
onClick={() => handleBannerData && handleBannerData(undefined)}
>
<X className="w-4 h-4 flex-shrink-0" />
</div>
</div>
);
};

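A short usage sketch for AuthBanner, assuming the TAuthErrorInfo shape produced by authErrorHandler later in this changeset (the component and prop names in the example are illustrative):

// Sketch: AuthBanner wired to dismissable local state.
import { useState } from "react";
import { AuthBanner } from "@/components/authentication";
import { authErrorHandler, EAuthenticationErrorCodes, TAuthErrorInfo } from "@/helpers/authentication.helper";

const SignInErrors = ({ errorCode }: { errorCode?: string }) => {
  const [errorInfo, setErrorInfo] = useState<TAuthErrorInfo | undefined>(
    errorCode ? authErrorHandler(errorCode as EAuthenticationErrorCodes) : undefined
  );
  // AuthBanner renders nothing when bannerData is undefined and clears itself via handleBannerData.
  return <AuthBanner bannerData={errorInfo} handleBannerData={setErrorInfo} />;
};
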
@@ -11,10 +11,11 @@ type Props = {
config: JSX.Element;
disabled?: boolean;
withBorder?: boolean;
+unavailable?: boolean;
};

export const AuthenticationMethodCard: FC<Props> = (props) => {
-const { name, description, icon, config, disabled = false, withBorder = true } = props;
+const { name, description, icon, config, disabled = false, withBorder = true, unavailable = false } = props;

return (
<div
@@ -22,7 +23,11 @@ export const AuthenticationMethodCard: FC<Props> = (props) => {
"px-4 py-3 border border-custom-border-200": withBorder,
})}
>
-<div className="flex grow items-center gap-4">
+<div
+className={cn("flex grow items-center gap-4", {
+"opacity-50": unavailable,
+})}
+>
<div className="shrink-0">
<div className="flex h-10 w-10 items-center justify-center rounded-full bg-custom-background-80">{icon}</div>
</div>

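With the new unavailable prop, a card can advertise a mode without exposing its configuration. A hypothetical call site (all values illustrative):

// Sketch: an unavailable method renders at 50% opacity with an upgrade CTA as its config slot.
<AuthenticationMethodCard
  name="OIDC"
  description="Authenticate your users via the OpenID Connect protocol."
  icon={<span />} // a logo <Image /> in the real pages
  config={<UpgradeButton />}
  disabled
  unavailable
/>
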
@@ -1,3 +1,4 @@
+export * from "./auth-banner";
export * from "./email-config-switch";
export * from "./password-config-switch";
export * from "./authentication-method-card";

admin/core/components/common/code-block.tsx (new file, 21 lines)
@@ -0,0 +1,21 @@
import { cn } from "@/helpers/common.helper";

type TProps = {
children: React.ReactNode;
className?: string;
darkerShade?: boolean;
};

export const CodeBlock = ({ children, className, darkerShade }: TProps) => (
<span
className={cn(
"px-0.5 text-xs text-custom-text-300 bg-custom-background-90 font-semibold rounded-md border border-custom-border-100",
{
"text-custom-text-200 bg-custom-background-80 border-custom-border-200": darkerShade,
},
className
)}
>
{children}
</span>
);

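CodeBlock is the inline span used by the field descriptions earlier in this changeset. A minimal usage sketch (the wrapping component name is illustrative):

// Sketch: default vs. darkerShade variants.
import { CodeBlock } from "@/components/common";

const RedirectUriHint = () => (
  <>
    Paste this into the <CodeBlock darkerShade>Redirect URI</CodeBlock> field; darkerShade swaps in
    the higher-contrast text, background, and border classes defined above.
  </>
);
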
@@ -38,7 +38,7 @@ export const ControllerInput: React.FC<Props> = (props) => {
return (
<div className="flex flex-col gap-1">
<h4 className="text-sm text-custom-text-300">
-{label} {!required && "(optional)"}
+{label}
</h4>
<div className="relative">
<Controller
@@ -80,7 +80,7 @@ export const ControllerInput: React.FC<Props> = (props) => {
</button>
))}
</div>
-{description && <p className="text-xs text-custom-text-300">{description}</p>}
+{description && <p className="pt-0.5 text-xs text-custom-text-300">{description}</p>}
</div>
);
};

@@ -7,3 +7,5 @@ export * from "./banner";
export * from "./empty-state";
export * from "./logo-spinner";
export * from "./page-header";
+export * from "./code-block";
+export * from "./upgrade-button";

admin/core/components/common/upgrade-button.tsx (new file, 16 lines)
@@ -0,0 +1,16 @@
"use client";

import React from "react";
// icons
import { SquareArrowOutUpRight } from "lucide-react";
// ui
import { getButtonStyling } from "@plane/ui";
// helpers
import { cn } from "@/helpers/common.helper";

export const UpgradeButton: React.FC = () => (
<a href="https://plane.so/one" target="_blank" className={cn(getButtonStyling("primary", "sm"))}>
Available on One
<SquareArrowOutUpRight className="h-3.5 w-3.5 p-0.5" />
</a>
);

@@ -174,6 +174,7 @@ export const InstanceSetupForm: FC = (props) => {
placeholder="Wilber"
value={formData.first_name}
onChange={(e) => handleFormChange("first_name", e.target.value)}
+autoComplete="on"
autoFocus
/>
</div>
@@ -190,6 +191,7 @@ export const InstanceSetupForm: FC = (props) => {
placeholder="Wright"
value={formData.last_name}
onChange={(e) => handleFormChange("last_name", e.target.value)}
+autoComplete="on"
/>
</div>
</div>
@@ -208,6 +210,7 @@ export const InstanceSetupForm: FC = (props) => {
value={formData.email}
onChange={(e) => handleFormChange("email", e.target.value)}
hasError={errorData.type && errorData.type === EErrorCodes.INVALID_EMAIL ? true : false}
+autoComplete="on"
/>
{errorData.type && errorData.type === EErrorCodes.INVALID_EMAIL && errorData.message && (
<p className="px-1 text-xs text-red-500">{errorData.message}</p>
@@ -247,6 +250,7 @@ export const InstanceSetupForm: FC = (props) => {
hasError={errorData.type && errorData.type === EErrorCodes.INVALID_PASSWORD ? true : false}
onFocus={() => setIsPasswordInputFocused(true)}
onBlur={() => setIsPasswordInputFocused(false)}
+autoComplete="on"
/>
{showPassword.password ? (
<button

@@ -8,8 +8,16 @@ import { Button, Input, Spinner } from "@plane/ui";
// components
import { Banner } from "@/components/common";
// helpers
+import {
+authErrorHandler,
+EAuthenticationErrorCodes,
+EErrorAlertType,
+TAuthErrorInfo,
+} from "@/helpers/authentication.helper";

import { API_BASE_URL } from "@/helpers/common.helper";
import { AuthService } from "@/services/auth.service";
+import { AuthBanner } from "../authentication";
// ui
// icons

@@ -53,12 +61,11 @@ export const InstanceSignInForm: FC = (props) => {
const [csrfToken, setCsrfToken] = useState<string | undefined>(undefined);
const [formData, setFormData] = useState<TFormData>(defaultFromData);
const [isSubmitting, setIsSubmitting] = useState(false);
+const [errorInfo, setErrorInfo] = useState<TAuthErrorInfo | undefined>(undefined);

const handleFormChange = (key: keyof TFormData, value: string | boolean) =>
setFormData((prev) => ({ ...prev, [key]: value }));

-console.log("csrfToken", csrfToken);

useEffect(() => {
if (csrfToken === undefined)
authService.requestCSRFToken().then((data) => data?.csrf_token && setCsrfToken(data.csrf_token));
@@ -93,6 +100,15 @@ export const InstanceSignInForm: FC = (props) => {
[formData.email, formData.password, isSubmitting]
);

+useEffect(() => {
+if (errorCode) {
+const errorDetail = authErrorHandler(errorCode?.toString() as EAuthenticationErrorCodes);
+if (errorDetail) {
+setErrorInfo(errorDetail);
+}
+}
+}, [errorCode]);

return (
<div className="flex-grow container mx-auto max-w-lg px-10 lg:max-w-md lg:px-5 py-10 lg:pt-28 transition-all">
<div className="relative flex flex-col space-y-6">
@@ -105,7 +121,11 @@ export const InstanceSignInForm: FC = (props) => {
</p>
</div>

-{errorData.type && errorData?.message && <Banner type="error" message={errorData?.message} />}
+{errorData.type && errorData?.message ? (
+<Banner type="error" message={errorData?.message} />
+) : (
+<>{errorInfo && <AuthBanner bannerData={errorInfo} handleBannerData={(value) => setErrorInfo(value)} />}</>
+)}

<form
className="space-y-4"
@@ -129,6 +149,7 @@ export const InstanceSignInForm: FC = (props) => {
placeholder="name@company.com"
value={formData.email}
onChange={(e) => handleFormChange("email", e.target.value)}
+autoComplete="on"
autoFocus
/>
</div>
@@ -147,6 +168,7 @@ export const InstanceSignInForm: FC = (props) => {
placeholder="Enter your password"
value={formData.password}
onChange={(e) => handleFormChange("password", e.target.value)}
+autoComplete="on"
/>
{showPassword ? (
<button

@@ -1,7 +1,24 @@
import { ReactNode } from "react";
+import Image from "next/image";
+import Link from "next/link";
+import { KeyRound, Mails } from "lucide-react";
+// types
+import { TGetBaseAuthenticationModeProps, TInstanceAuthenticationModes } from "@plane/types";
+// components
+import {
+EmailCodesConfiguration,
+GithubConfiguration,
+GitlabConfiguration,
+GoogleConfiguration,
+PasswordLoginConfiguration,
+} from "@/components/authentication";
// helpers
-import { SUPPORT_EMAIL } from "./common.helper";
+import { SUPPORT_EMAIL, resolveGeneralTheme } from "@/helpers/common.helper";
+// images
+import githubLightModeImage from "@/public/logos/github-black.png";
+import githubDarkModeImage from "@/public/logos/github-white.png";
+import GitlabLogo from "@/public/logos/gitlab-logo.svg";
+import GoogleLogo from "@/public/logos/google-logo.svg";

export enum EPageTypes {
PUBLIC = "PUBLIC",
@@ -134,3 +151,53 @@ export const authErrorHandler = (

return undefined;
};

+export const getBaseAuthenticationModes: (props: TGetBaseAuthenticationModeProps) => TInstanceAuthenticationModes[] = ({
+disabled,
+updateConfig,
+resolvedTheme,
+}) => [
+{
+key: "unique-codes",
+name: "Unique codes",
+description:
+"Log in or sign up for Plane using codes sent via email. You need to have set up SMTP to use this method.",
+icon: <Mails className="h-6 w-6 p-0.5 text-custom-text-300/80" />,
+config: <EmailCodesConfiguration disabled={disabled} updateConfig={updateConfig} />,
+},
+{
+key: "passwords-login",
+name: "Passwords",
+description: "Allow members to create accounts with passwords and use it with their email addresses to sign in.",
+icon: <KeyRound className="h-6 w-6 p-0.5 text-custom-text-300/80" />,
+config: <PasswordLoginConfiguration disabled={disabled} updateConfig={updateConfig} />,
+},
+{
+key: "google",
+name: "Google",
+description: "Allow members to log in or sign up for Plane with their Google accounts.",
+icon: <Image src={GoogleLogo} height={20} width={20} alt="Google Logo" />,
+config: <GoogleConfiguration disabled={disabled} updateConfig={updateConfig} />,
+},
+{
+key: "github",
+name: "GitHub",
+description: "Allow members to log in or sign up for Plane with their GitHub accounts.",
+icon: (
+<Image
+src={resolveGeneralTheme(resolvedTheme) === "dark" ? githubDarkModeImage : githubLightModeImage}
+height={20}
+width={20}
+alt="GitHub Logo"
+/>
+),
+config: <GithubConfiguration disabled={disabled} updateConfig={updateConfig} />,
+},
+{
+key: "gitlab",
+name: "GitLab",
+description: "Allow members to log in or sign up to plane with their GitLab accounts.",
+icon: <Image src={GitlabLogo} height={20} width={20} alt="GitLab Logo" />,
+config: <GitlabConfiguration disabled={disabled} updateConfig={updateConfig} />,
+},
+];

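The point of extracting getBaseAuthenticationModes is that each edition can append its own entries. A condensed sketch of the pattern, mirroring the admin page diff earlier in this changeset (the function name getAllModes is illustrative):

// Sketch: spread the shared base modes, then add edition-specific ones.
import { TGetBaseAuthenticationModeProps, TInstanceAuthenticationModes } from "@plane/types";
import { getBaseAuthenticationModes } from "@/helpers/authentication.helper";
import { UpgradeButton } from "@/components/common/upgrade-button";

const getAllModes = (props: TGetBaseAuthenticationModeProps): TInstanceAuthenticationModes[] => [
  ...getBaseAuthenticationModes(props), // unique codes, passwords, Google, GitHub, GitLab
  {
    key: "oidc",
    name: "OIDC",
    description: "Authenticate your users via the OpenID Connect protocol.",
    icon: <span />, // a logo <Image /> in the real diff
    config: <UpgradeButton />,
    unavailable: true,
  },
];
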
@@ -1,6 +1,6 @@
{
"name": "admin",
-"version": "0.21.0",
+"version": "0.22.0",
"private": true,
"scripts": {
"dev": "turbo run develop",
@@ -14,10 +14,11 @@
"@headlessui/react": "^1.7.19",
"@plane/types": "*",
"@plane/ui": "*",
+"@plane/constants": "*",
"@tailwindcss/typography": "^0.5.9",
"@types/lodash": "^4.17.0",
"autoprefixer": "10.4.14",
-"axios": "^1.6.7",
+"axios": "^1.7.4",
"js-cookie": "^3.0.5",
"lodash": "^4.17.21",
"lucide-react": "^0.356.0",
@@ -28,7 +29,7 @@
"postcss": "^8.4.38",
"react": "^18.3.1",
"react-dom": "^18.3.1",
-"react-hook-form": "^7.51.0",
+"react-hook-form": "7.51.5",
"swr": "^2.2.4",
"tailwindcss": "3.3.2",
"uuid": "^9.0.1",

admin/public/logos/oidc-logo.svg (new file, 11 lines; image, 702 B)
@@ -0,0 +1,11 @@
<svg width="92" height="84" viewBox="0 0 92 84" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_3695_11896)">
<path d="M83.0398 32.6876C74.2901 27.2397 62.0735 23.8553 48.7013 23.8553C21.7918 23.8553 0 37.3101 0 53.9016C0 69.0898 18.1598 81.554 41.685 83.7001V74.9504C25.8364 72.9693 13.95 64.3022 13.95 53.9016C13.95 42.0977 29.4684 32.44 48.7013 32.44C58.2765 32.44 66.9436 34.8338 73.217 38.7134L64.3022 44.2439H92.1197V27.0746L83.0398 32.6876Z" fill="#CCCCCC"/>
<path d="M41.6846 8.99736V74.9504V83.7002L55.6346 74.9504V0L41.6846 8.99736Z" fill="#FF6200"/>
</g>
<defs>
<clipPath id="clip0_3695_11896">
<rect width="92" height="84" fill="white"/>
</clipPath>
</defs>
</svg>

admin/public/logos/saml-logo.svg (new file, 17 lines; image, 2.0 KiB)
@@ -0,0 +1,17 @@
<svg width="700" height="650" viewBox="0 0 700 650" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_3262_5767)">
<mask id="mask0_3262_5767" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="0" y="0" width="700" height="650">
<path d="M700 0H0V650H700V0Z" fill="white"/>
</mask>
<g mask="url(#mask0_3262_5767)">
<path d="M337.682 0L360.832 20.5C377.982 35.7 395.182 50.85 412.132 66.25C521.982 166 614.982 278.25 684.982 407.45C688.582 414.05 691.832 420.85 695.082 427.6L699.982 437.75L694.582 440.6L690.532 434.85L680.032 419.9L672.682 409.2C621.732 335.25 570.682 261.2 500.582 201.95C479.373 183.995 455.969 168.807 430.932 156.75C380.232 132.5 335.132 142.2 296.432 182C259.632 219.85 240.532 266.85 223.282 314.65C221.032 320.8 218.682 326.9 216.332 333L212.232 343.75L203.632 341C208.632 323.6 213.232 306.1 217.832 288.55C228.332 248.8 238.832 209.05 253.432 170.75C268.932 129.95 288.532 90.6 308.082 51.25C316.532 34.2 324.982 17.15 333.082 0H337.682Z" fill="#C22E33"/>
<path d="M372.382 491.1C291.082 529.6 94.3829 569.3 1.08287 559.1C-14.1671 478.8 135.482 102.5 208.982 45.5L204.232 56.4C202.115 61.531 199.813 66.5842 197.332 71.55L194.032 78C156.032 151.1 118.082 224.3 98.6329 304.5C91.6287 332.124 87.8038 360.458 87.2328 388.95C86.7328 455.95 128.432 501.55 198.082 504.4C231.582 505.75 265.432 502.25 299.232 498.7C313.932 497.2 328.582 495.65 343.232 494.5C348.632 494.1 353.932 493.45 360.832 492.55L372.382 491.15V491.1Z" fill="#C22E33"/>
<path d="M141.233 639.05C118.983 640.75 96.733 642.45 74.583 644.45C279.433 663.95 476.083 630.6 670.083 562.25C606.833 450.75 521.583 362.7 422.483 286.15C423.783 291.05 426.683 294.6 429.533 298.1L431.933 301.1C440.433 312.4 449.333 323.5 458.283 334.6C478.733 360.05 499.183 385.5 514.583 413.5C553.483 484.5 532.383 545.9 456.183 578.3C406.083 599.65 351.333 614.2 297.183 622.9C245.683 631.1 193.433 635.05 141.233 639.05Z" fill="#C22E33"/>
</g>
</g>
<defs>
<clipPath id="clip0_3262_5767">
<rect width="700" height="650" fill="white"/>
</clipPath>
</defs>
</svg>

@@ -1,3 +1,5 @@
+ARG BASE_TAG=develop
+ARG BUILD_TYPE=full
# *****************************************************************************
# STAGE 1: Build the project
# *****************************************************************************
@@ -5,7 +7,6 @@ FROM node:18-alpine AS builder
RUN apk add --no-cache libc6-compat
# Set working directory
WORKDIR /app
-ENV NEXT_PUBLIC_API_BASE_URL=http://NEXT_PUBLIC_API_BASE_URL_PLACEHOLDER

RUN yarn global add turbo
COPY . .
@@ -46,16 +47,18 @@ ENV NEXT_PUBLIC_SPACE_BASE_URL=$NEXT_PUBLIC_SPACE_BASE_URL
ARG NEXT_PUBLIC_SPACE_BASE_PATH="/spaces"
ENV NEXT_PUBLIC_SPACE_BASE_PATH=$NEXT_PUBLIC_SPACE_BASE_PATH

-ENV NEXT_TELEMETRY_DISABLED 1
-ENV TURBO_TELEMETRY_DISABLED 1
ARG NEXT_PUBLIC_WEB_BASE_URL=""
ENV NEXT_PUBLIC_WEB_BASE_URL=$NEXT_PUBLIC_WEB_BASE_URL

-RUN yarn turbo run build
+ENV NEXT_TELEMETRY_DISABLED=1
+ENV TURBO_TELEMETRY_DISABLED=1

+RUN yarn turbo run build --filter=web --filter=space --filter=admin

# *****************************************************************************
# STAGE 3: Copy the project and start it
# *****************************************************************************
# FROM makeplane/plane-aio-base AS runner
-FROM makeplane/plane-aio-base:develop AS runner
+FROM makeplane/plane-aio-base:${BUILD_TYPE}-${BASE_TAG} AS runner

WORKDIR /app

@@ -63,17 +66,14 @@ SHELL [ "/bin/bash", "-c" ]

# PYTHON APPLICATION SETUP

-ENV PYTHONDONTWRITEBYTECODE 1
-ENV PYTHONUNBUFFERED 1
+ENV PYTHONDONTWRITEBYTECODE=1
+ENV PYTHONUNBUFFERED=1
+ENV PIP_DISABLE_PIP_VERSION_CHECK=1

COPY apiserver/requirements.txt ./api/
COPY apiserver/requirements ./api/requirements

-RUN python3.12 -m venv /app/venv && \
-source /app/venv/bin/activate && \
-/app/venv/bin/pip install --upgrade pip && \
-/app/venv/bin/pip install -r ./api/requirements.txt --compile --no-cache-dir
+RUN pip install -r ./api/requirements.txt --compile --no-cache-dir

# Add in Django deps and generate Django's static files
COPY apiserver/manage.py ./api/manage.py
@@ -87,7 +87,6 @@ RUN chmod +x ./api/bin/*
RUN chmod -R 777 ./api/

# NEXTJS BUILDS

COPY --from=installer /app/web/next.config.js ./web/
COPY --from=installer /app/web/package.json ./web/
COPY --from=installer /app/web/.next/standalone ./web
@@ -124,26 +123,63 @@ ENV NEXT_PUBLIC_SPACE_BASE_PATH=$NEXT_PUBLIC_SPACE_BASE_PATH
ARG NEXT_PUBLIC_WEB_BASE_URL=""
ENV NEXT_PUBLIC_WEB_BASE_URL=$NEXT_PUBLIC_WEB_BASE_URL

-ENV NEXT_TELEMETRY_DISABLED 1
-ENV TURBO_TELEMETRY_DISABLED 1
+ENV NEXT_TELEMETRY_DISABLED=1
+ENV TURBO_TELEMETRY_DISABLED=1

-COPY aio/supervisord.conf /app/supervisord.conf
+ARG BUILD_TYPE=full
+ENV BUILD_TYPE=$BUILD_TYPE

-COPY aio/aio.sh /app/aio.sh
-RUN chmod +x /app/aio.sh
+COPY aio/supervisord-${BUILD_TYPE}-base /app/supervisord.conf
+COPY aio/supervisord-app /app/supervisord-app
+RUN cat /app/supervisord-app >> /app/supervisord.conf && \
+rm /app/supervisord-app

-COPY ./aio/nginx.conf /etc/nginx/nginx.conf.template

+# if build type is full, run the below copy pg-setup.sh
COPY aio/postgresql.conf /etc/postgresql/postgresql.conf
COPY aio/pg-setup.sh /app/pg-setup.sh
RUN chmod +x /app/pg-setup.sh

+COPY deploy/selfhost/variables.env /app/plane.env
+# *****************************************************************************
+# APPLICATION ENVIRONMENT SETTINGS
+# *****************************************************************************
+ENV APP_DOMAIN=localhost

# NGINX Conf Copy
+COPY ./aio/nginx.conf.aio /etc/nginx/nginx.conf.template
COPY ./nginx/env.sh /app/nginx-start.sh
RUN chmod +x /app/nginx-start.sh
+ENV WEB_URL=http://${APP_DOMAIN}
+ENV DEBUG=0
+ENV SENTRY_DSN=
+ENV SENTRY_ENVIRONMENT=production
+ENV CORS_ALLOWED_ORIGINS=http://${APP_DOMAIN},https://${APP_DOMAIN}
+# Secret Key
+ENV SECRET_KEY=60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5
+# Gunicorn Workers
+ENV GUNICORN_WORKERS=1

-RUN ./pg-setup.sh
+ENV POSTGRES_USER="plane"
+ENV POSTGRES_PASSWORD="plane"
+ENV POSTGRES_DB="plane"
+ENV POSTGRES_HOST="localhost"
+ENV POSTGRES_PORT="5432"
+ENV DATABASE_URL="postgresql://plane:plane@localhost:5432/plane"

VOLUME [ "/app/data/minio/uploads", "/var/lib/postgresql/data" ]
+ENV REDIS_HOST="localhost"
+ENV REDIS_PORT="6379"
+ENV REDIS_URL="redis://localhost:6379"

+ENV USE_MINIO="1"
+ENV AWS_REGION=""
+ENV AWS_ACCESS_KEY_ID="access-key"
+ENV AWS_SECRET_ACCESS_KEY="secret-key"
+ENV AWS_S3_ENDPOINT_URL="http://localhost:9000"
+ENV AWS_S3_BUCKET_NAME="uploads"
+ENV MINIO_ROOT_USER="access-key"
+ENV MINIO_ROOT_PASSWORD="secret-key"
+ENV BUCKET_NAME="uploads"
+ENV FILE_SIZE_LIMIT="5242880"

+# *****************************************************************************

+RUN /app/pg-setup.sh

CMD ["/usr/bin/supervisord", "-c", "/app/supervisord.conf"]

@@ -1,19 +1,28 @@
-FROM --platform=$BUILDPLATFORM tonistiigi/binfmt as binfmt
+FROM --platform=$BUILDPLATFORM tonistiigi/binfmt AS binfmt

-FROM debian:12-slim
+FROM python:3.12-slim

# Set environment variables to non-interactive for apt
ENV DEBIAN_FRONTEND=noninteractive
+ENV BUILD_TYPE=full

SHELL [ "/bin/bash", "-c" ]

+WORKDIR /app

+RUN mkdir -p /app/{data,logs} && \
+mkdir -p /app/data/{redis,pg,minio,nginx} && \
+mkdir -p /app/logs/{access,error} && \
+mkdir -p /etc/supervisor/conf.d

# Update the package list and install prerequisites
RUN apt-get update && \
apt-get install -y \
gnupg2 curl ca-certificates lsb-release software-properties-common \
build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev \
libsqlite3-dev wget llvm libncurses5-dev libncursesw5-dev xz-utils \
-tk-dev libffi-dev liblzma-dev supervisor nginx nano vim ncdu
+tk-dev libffi-dev liblzma-dev supervisor nginx nano vim ncdu \
+sudo lsof net-tools libpq-dev procps gettext

# Install Redis 7.2
RUN echo "deb http://deb.debian.org/debian $(lsb_release -cs)-backports main" > /etc/apt/sources.list.d/backports.list && \
@@ -23,13 +32,15 @@ RUN echo "deb http://deb.debian.org/debian $(lsb_release -cs)-backports main" >
apt-get install -y redis-server

# Install PostgreSQL 15
-ENV POSTGRES_VERSION 15
+ENV POSTGRES_VERSION=15
RUN curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor -o /usr/share/keyrings/pgdg-archive-keyring.gpg && \
echo "deb [signed-by=/usr/share/keyrings/pgdg-archive-keyring.gpg] http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
apt-get update && \
apt-get install -y postgresql-$POSTGRES_VERSION postgresql-client-$POSTGRES_VERSION && \
mkdir -p /var/lib/postgresql/data && \
chown -R postgres:postgres /var/lib/postgresql
+COPY postgresql.conf /etc/postgresql/postgresql.conf
+RUN sudo -u postgres /usr/lib/postgresql/$POSTGRES_VERSION/bin/initdb -D /var/lib/postgresql/data

# Install MinIO
ARG TARGETARCH
@@ -42,51 +53,21 @@ RUN if [ "$TARGETARCH" = "amd64" ]; then \
fi && \
chmod +x /usr/local/bin/minio


# Install Node.js 18
RUN curl -fsSL https://deb.nodesource.com/setup_18.x | bash - && \
-apt-get install -y nodejs

-# Install Python 3.12 from source
-RUN cd /usr/src && \
-wget https://www.python.org/ftp/python/3.12.0/Python-3.12.0.tgz && \
-tar xzf Python-3.12.0.tgz && \
-cd Python-3.12.0 && \
-./configure --enable-optimizations && \
-make altinstall && \
-rm -f /usr/src/Python-3.12.0.tgz

-RUN python3.12 -m pip install --upgrade pip

-RUN echo "alias python=/usr/local/bin/python3.12" >> ~/.bashrc && \
-echo "alias pip=/usr/local/bin/pip3.12" >> ~/.bashrc

-# Clean up
-RUN apt-get clean && \
-rm -rf /var/lib/apt/lists/* /usr/src/Python-3.12.0

-WORKDIR /app

-RUN mkdir -p /app/{data,logs} && \
-mkdir -p /app/data/{redis,pg,minio,nginx} && \
-mkdir -p /app/logs/{access,error} && \
-mkdir -p /etc/supervisor/conf.d
+apt-get install -y nodejs && \
+python -m pip install --upgrade pip && \
+apt-get clean && \
+rm -rf /var/lib/apt/lists/*

# Create Supervisor configuration file
-COPY supervisord.base /app/supervisord.conf

-RUN apt-get update && \
-apt-get install -y sudo lsof net-tools libpq-dev procps gettext && \
-apt-get clean

-RUN sudo -u postgres /usr/lib/postgresql/$POSTGRES_VERSION/bin/initdb -D /var/lib/postgresql/data
-COPY postgresql.conf /etc/postgresql/postgresql.conf

-RUN echo "alias python=/usr/local/bin/python3.12" >> ~/.bashrc && \
-echo "alias pip=/usr/local/bin/pip3.12" >> ~/.bashrc
+COPY supervisord-full-base /app/supervisord.conf
+COPY nginx.conf /etc/nginx/nginx.conf.template
+COPY env.sh /app/nginx-start.sh
+RUN chmod +x /app/nginx-start.sh

# Expose ports for Redis, PostgreSQL, and MinIO
-EXPOSE 6379 5432 9000 80
+EXPOSE 6379 5432 9000 80 443

# Start Supervisor
CMD ["/usr/bin/supervisord", "-c", "/app/supervisord.conf"]

aio/Dockerfile-base-slim (new file, 45 lines)
@@ -0,0 +1,45 @@
FROM --platform=$BUILDPLATFORM tonistiigi/binfmt AS binfmt

FROM python:3.12-slim

# Set environment variables to non-interactive for apt
ENV DEBIAN_FRONTEND=noninteractive
ENV BUILD_TYPE=slim

SHELL [ "/bin/bash", "-c" ]

WORKDIR /app

RUN mkdir -p /app/{data,logs} && \
mkdir -p /app/data/{nginx} && \
mkdir -p /app/logs/{access,error} && \
mkdir -p /etc/supervisor/conf.d

# Update the package list and install prerequisites
RUN apt-get update && \
apt-get install -y \
gnupg2 curl ca-certificates lsb-release software-properties-common \
build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev \
libsqlite3-dev wget llvm libncurses5-dev libncursesw5-dev xz-utils \
tk-dev libffi-dev liblzma-dev supervisor nginx nano vim ncdu \
sudo lsof net-tools libpq-dev procps gettext

# Install Node.js 18
RUN curl -fsSL https://deb.nodesource.com/setup_18.x | bash - && \
apt-get install -y nodejs

RUN python -m pip install --upgrade pip && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*

# Create Supervisor configuration file
COPY supervisord-slim-base /app/supervisord.conf
COPY nginx.conf /etc/nginx/nginx.conf.template
COPY env.sh /app/nginx-start.sh
RUN chmod +x /app/nginx-start.sh

# Expose ports for Redis, PostgreSQL, and MinIO
EXPOSE 80 443

# Start Supervisor
CMD ["/usr/bin/supervisord", "-c", "/app/supervisord.conf"]

aio/aio.sh (deleted file, 30 lines)
@@ -1,30 +0,0 @@
-#!/bin/bash
-set -e
-
-
-if [ "$1" = 'api' ]; then
-source /app/venv/bin/activate
-cd /app/api
-exec ./bin/docker-entrypoint-api.sh
-elif [ "$1" = 'worker' ]; then
-source /app/venv/bin/activate
-cd /app/api
-exec ./bin/docker-entrypoint-worker.sh
-elif [ "$1" = 'beat' ]; then
-source /app/venv/bin/activate
-cd /app/api
-exec ./bin/docker-entrypoint-beat.sh
-elif [ "$1" = 'migrator' ]; then
-source /app/venv/bin/activate
-cd /app/api
-exec ./bin/docker-entrypoint-migrator.sh
-elif [ "$1" = 'web' ]; then
-node /app/web/web/server.js
-elif [ "$1" = 'space' ]; then
-node /app/space/space/server.js
-elif [ "$1" = 'admin' ]; then
-node /app/admin/admin/server.js
-else
-echo "Command not found"
-exit 1
-fi

aio/env.sh (new file, 7 lines)
@@ -0,0 +1,7 @@
#!/bin/bash

export dollar="$"
export http_upgrade="http_upgrade"
export scheme="scheme"
envsubst < /etc/nginx/nginx.conf.template > /etc/nginx/nginx.conf
exec nginx -g 'daemon off;'

@@ -37,7 +37,6 @@ http {
proxy_pass http://localhost:3002/spaces/;
}

-
location /god-mode/ {
proxy_http_version 1.1;
proxy_set_header Upgrade ${dollar}http_upgrade;

@@ -1,14 +1,14 @@
#!/bin/bash

+if [ "$BUILD_TYPE" == "full" ]; then

-# Variables
+set -o allexport
+source plane.env set
+set +o allexport
-export PGHOST=localhost

+export PGHOST=localhost
sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/pg_ctl" -D /var/lib/postgresql/data start
sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/psql" --command "CREATE USER $POSTGRES_USER WITH SUPERUSER PASSWORD '$POSTGRES_PASSWORD';" && \
sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/createdb" -O "$POSTGRES_USER" "$POSTGRES_DB" && \
sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/psql" --command "GRANT ALL PRIVILEGES ON DATABASE $POSTGRES_DB TO $POSTGRES_USER;" && \
sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/pg_ctl" -D /var/lib/postgresql/data stop

+fi

-sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/pg_ctl" -D /var/lib/postgresql/data start
-sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/psql" --command "CREATE USER $POSTGRES_USER WITH SUPERUSER PASSWORD '$POSTGRES_PASSWORD';" && \
-sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/createdb" -O "$POSTGRES_USER" "$POSTGRES_DB" && \
-sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/pg_ctl" -D /var/lib/postgresql/data stop

# -----------------------------
# PostgreSQL configuration file
# -----------------------------
#
# This file consists of lines of the form:
#
#   name = value
#
# (The "=" is optional.)  Whitespace may be used.  Comments are introduced with
# "#" anywhere on a line.  The complete list of parameter names and allowed
# values can be found in the PostgreSQL documentation.
#
# The commented-out settings shown in this file represent the default values.
# Re-commenting a setting is NOT sufficient to revert it to the default value;
# you need to reload the server.
#
# This file is read on server startup and when the server receives a SIGHUP
# signal.  If you edit the file on a running system, you have to SIGHUP the
# server for the changes to take effect, run "pg_ctl reload", or execute
# "SELECT pg_reload_conf()".  Some parameters, which are marked below,
# require a server shutdown and restart to take effect.
#
# Any parameter can also be given as a command-line option to the server, e.g.,
# "postgres -c log_connections=on".  Some parameters can be changed at run time
# with the "SET" SQL command.
#
# Memory units:  B  = bytes            Time units:  us  = microseconds
#                kB = kilobytes                     ms  = milliseconds
#                MB = megabytes                     s   = seconds
#                GB = gigabytes                     min = minutes
#                TB = terabytes                     h   = hours
#                                                   d   = days

# Allow connections from any IP address
listen_addresses = '*'

# Set the maximum number of connections
max_connections = 100
#------------------------------------------------------------------------------
# FILE LOCATIONS
#------------------------------------------------------------------------------

# Set the shared buffers size
shared_buffers = 128MB
# The default values of these variables are driven from the -D command-line
# option or PGDATA environment variable, represented here as ConfigDir.

# Other custom configurations can be added here
data_directory = '/var/lib/postgresql/data' # use data in another directory
# (change requires restart)
hba_file = '/etc/postgresql/15/main/pg_hba.conf' # host-based authentication file
# (change requires restart)
ident_file = '/etc/postgresql/15/main/pg_ident.conf' # ident configuration file
# (change requires restart)

# If external_pid_file is not explicitly set, no extra PID file is written.
external_pid_file = '/var/run/postgresql/15-main.pid' # write an extra PID file
# (change requires restart)


#------------------------------------------------------------------------------
# CONNECTIONS AND AUTHENTICATION
#------------------------------------------------------------------------------

# - Connection Settings -

listen_addresses = 'localhost' # what IP address(es) to listen on;
# comma-separated list of addresses;
# defaults to 'localhost'; use '*' for all
# (change requires restart)
port = 5432 # (change requires restart)
max_connections = 200 # (change requires restart)
#superuser_reserved_connections = 3 # (change requires restart)
unix_socket_directories = '/var/run/postgresql' # comma-separated list of directories
# (change requires restart)
#unix_socket_group = '' # (change requires restart)
#unix_socket_permissions = 0777 # begin with 0 to use octal notation
# (change requires restart)
#bonjour = off # advertise server via Bonjour
# (change requires restart)
#bonjour_name = '' # defaults to the computer name
# (change requires restart)

# - TCP settings -
# see "man tcp" for details

#tcp_keepalives_idle = 0 # TCP_KEEPIDLE, in seconds;
# 0 selects the system default
#tcp_keepalives_interval = 0 # TCP_KEEPINTVL, in seconds;
# 0 selects the system default
#tcp_keepalives_count = 0 # TCP_KEEPCNT;
# 0 selects the system default
#tcp_user_timeout = 0 # TCP_USER_TIMEOUT, in milliseconds;
# 0 selects the system default

#client_connection_check_interval = 0 # time between checks for client
# disconnection while running queries;
# 0 for never

# - Authentication -

#authentication_timeout = 1min # 1s-600s
#password_encryption = scram-sha-256 # scram-sha-256 or md5
#db_user_namespace = off

# GSSAPI using Kerberos
#krb_server_keyfile = 'FILE:${sysconfdir}/krb5.keytab'
#krb_caseins_users = off

# - SSL -

ssl = on
#ssl_ca_file = ''
ssl_cert_file = '/etc/ssl/certs/ssl-cert-snakeoil.pem'
#ssl_crl_file = ''
#ssl_crl_dir = ''
ssl_key_file = '/etc/ssl/private/ssl-cert-snakeoil.key'
#ssl_ciphers = 'HIGH:MEDIUM:+3DES:!aNULL' # allowed SSL ciphers
#ssl_prefer_server_ciphers = on
#ssl_ecdh_curve = 'prime256v1'
#ssl_min_protocol_version = 'TLSv1.2'
#ssl_max_protocol_version = ''
#ssl_dh_params_file = ''
#ssl_passphrase_command = ''
#ssl_passphrase_command_supports_reload = off


#------------------------------------------------------------------------------
# RESOURCE USAGE (except WAL)
#------------------------------------------------------------------------------

# - Memory -

shared_buffers = 256MB # min 128kB
# (change requires restart)
#huge_pages = try # on, off, or try
# (change requires restart)
#huge_page_size = 0 # zero for system default
# (change requires restart)
#temp_buffers = 8MB # min 800kB
#max_prepared_transactions = 0 # zero disables the feature
# (change requires restart)
# Caution: it is not advisable to set max_prepared_transactions nonzero unless
# you actively intend to use prepared transactions.
#work_mem = 4MB # min 64kB
#hash_mem_multiplier = 2.0 # 1-1000.0 multiplier on hash table work_mem
#maintenance_work_mem = 64MB # min 1MB
#autovacuum_work_mem = -1 # min 1MB, or -1 to use maintenance_work_mem
#logical_decoding_work_mem = 64MB # min 64kB
#max_stack_depth = 2MB # min 100kB
#shared_memory_type = mmap # the default is the first option
# supported by the operating system:
#   mmap
#   sysv
#   windows
# (change requires restart)
dynamic_shared_memory_type = posix # the default is usually the first option
# supported by the operating system:
#   posix
#   sysv
#   windows
#   mmap
# (change requires restart)
#min_dynamic_shared_memory = 0MB # (change requires restart)

# - Disk -

#temp_file_limit = -1 # limits per-process temp file space
# in kilobytes, or -1 for no limit

# - Kernel Resources -

#max_files_per_process = 1000 # min 64
# (change requires restart)

# - Cost-Based Vacuum Delay -

#vacuum_cost_delay = 0 # 0-100 milliseconds (0 disables)
#vacuum_cost_page_hit = 1 # 0-10000 credits
#vacuum_cost_page_miss = 2 # 0-10000 credits
#vacuum_cost_page_dirty = 20 # 0-10000 credits
#vacuum_cost_limit = 200 # 1-10000 credits

# - Background Writer -

#bgwriter_delay = 200ms # 10-10000ms between rounds
#bgwriter_lru_maxpages = 100 # max buffers written/round, 0 disables
#bgwriter_lru_multiplier = 2.0 # 0-10.0 multiplier on buffers scanned/round
#bgwriter_flush_after = 512kB # measured in pages, 0 disables

# - Asynchronous Behavior -

#backend_flush_after = 0 # measured in pages, 0 disables
#effective_io_concurrency = 1 # 1-1000; 0 disables prefetching
#maintenance_io_concurrency = 10 # 1-1000; 0 disables prefetching
#max_worker_processes = 8 # (change requires restart)
#max_parallel_workers_per_gather = 2 # limited by max_parallel_workers
#max_parallel_maintenance_workers = 2 # limited by max_parallel_workers
#max_parallel_workers = 8 # number of max_worker_processes that
# can be used in parallel operations
#parallel_leader_participation = on
#old_snapshot_threshold = -1 # 1min-60d; -1 disables; 0 is immediate
# (change requires restart)


#------------------------------------------------------------------------------
# WRITE-AHEAD LOG
#------------------------------------------------------------------------------

# - Settings -

#wal_level = replica # minimal, replica, or logical
# (change requires restart)
#fsync = on # flush data to disk for crash safety
# (turning this off can cause
# unrecoverable data corruption)
#synchronous_commit = on # synchronization level;
# off, local, remote_write, remote_apply, or on
#wal_sync_method = fsync # the default is the first option
# supported by the operating system:
#   open_datasync
#   fdatasync (default on Linux and FreeBSD)
#   fsync
#   fsync_writethrough
#   open_sync
#full_page_writes = on # recover from partial page writes
#wal_log_hints = off # also do full page writes of non-critical updates
# (change requires restart)
#wal_compression = off # enables compression of full-page writes;
# off, pglz, lz4, zstd, or on
#wal_init_zero = on # zero-fill new WAL files
#wal_recycle = on # recycle WAL files
#wal_buffers = -1 # min 32kB, -1 sets based on shared_buffers
# (change requires restart)
#wal_writer_delay = 200ms # 1-10000 milliseconds
#wal_writer_flush_after = 1MB # measured in pages, 0 disables
#wal_skip_threshold = 2MB

#commit_delay = 0 # range 0-100000, in microseconds
#commit_siblings = 5 # range 1-1000

# - Checkpoints -

#checkpoint_timeout = 5min # range 30s-1d
#checkpoint_completion_target = 0.9 # checkpoint target duration, 0.0 - 1.0
#checkpoint_flush_after = 256kB # measured in pages, 0 disables
#checkpoint_warning = 30s # 0 disables
max_wal_size = 1GB
min_wal_size = 80MB

# - Prefetching during recovery -

#recovery_prefetch = try # prefetch pages referenced in the WAL?
#wal_decode_buffer_size = 512kB # lookahead window used for prefetching
# (change requires restart)

# - Archiving -

#archive_mode = off # enables archiving; off, on, or always
# (change requires restart)
#archive_library = '' # library to use to archive a logfile segment
# (empty string indicates archive_command should
# be used)
#archive_command = '' # command to use to archive a logfile segment
# placeholders: %p = path of file to archive
#               %f = file name only
# e.g. 'test ! -f /mnt/server/archivedir/%f && cp %p /mnt/server/archivedir/%f'
#archive_timeout = 0 # force a logfile segment switch after this
# number of seconds; 0 disables

# - Archive Recovery -

# These are only used in recovery mode.

#restore_command = '' # command to use to restore an archived logfile segment
# placeholders: %p = path of file to restore
#               %f = file name only
# e.g. 'cp /mnt/server/archivedir/%f %p'
#archive_cleanup_command = '' # command to execute at every restartpoint
#recovery_end_command = '' # command to execute at completion of recovery

# - Recovery Target -

# Set these only when performing a targeted recovery.

#recovery_target = '' # 'immediate' to end recovery as soon as a
# consistent state is reached
# (change requires restart)
#recovery_target_name = '' # the named restore point to which recovery will proceed
# (change requires restart)
#recovery_target_time = '' # the time stamp up to which recovery will proceed
# (change requires restart)
#recovery_target_xid = '' # the transaction ID up to which recovery will proceed
# (change requires restart)
#recovery_target_lsn = '' # the WAL LSN up to which recovery will proceed
# (change requires restart)
#recovery_target_inclusive = on # Specifies whether to stop:
# just after the specified recovery target (on)
# just before the recovery target (off)
# (change requires restart)
#recovery_target_timeline = 'latest' # 'current', 'latest', or timeline ID
# (change requires restart)
#recovery_target_action = 'pause' # 'pause', 'promote', 'shutdown'
# (change requires restart)


#------------------------------------------------------------------------------
# REPLICATION
#------------------------------------------------------------------------------

# - Sending Servers -

# Set these on the primary and on any standby that will send replication data.

#max_wal_senders = 10 # max number of walsender processes
# (change requires restart)
#max_replication_slots = 10 # max number of replication slots
# (change requires restart)
#wal_keep_size = 0 # in megabytes; 0 disables
#max_slot_wal_keep_size = -1 # in megabytes; -1 disables
#wal_sender_timeout = 60s # in milliseconds; 0 disables
#track_commit_timestamp = off # collect timestamp of transaction commit
# (change requires restart)

# - Primary Server -

# These settings are ignored on a standby server.

#synchronous_standby_names = '' # standby servers that provide sync rep
# method to choose sync standbys, number of sync standbys,
# and comma-separated list of application_name
# from standby(s); '*' = all
#vacuum_defer_cleanup_age = 0 # number of xacts by which cleanup is delayed

# - Standby Servers -

# These settings are ignored on a primary server.

#primary_conninfo = '' # connection string to sending server
#primary_slot_name = '' # replication slot on sending server
#promote_trigger_file = '' # file name whose presence ends recovery
#hot_standby = on # "off" disallows queries during recovery
# (change requires restart)
#max_standby_archive_delay = 30s # max delay before canceling queries
# when reading WAL from archive;
# -1 allows indefinite delay
#max_standby_streaming_delay = 30s # max delay before canceling queries
# when reading streaming WAL;
# -1 allows indefinite delay
#wal_receiver_create_temp_slot = off # create temp slot if primary_slot_name
# is not set
#wal_receiver_status_interval = 10s # send replies at least this often
# 0 disables
#hot_standby_feedback = off # send info from standby to prevent
# query conflicts
#wal_receiver_timeout = 60s # time that receiver waits for
# communication from primary
# in milliseconds; 0 disables
#wal_retrieve_retry_interval = 5s # time to wait before retrying to
# retrieve WAL after a failed attempt
#recovery_min_apply_delay = 0 # minimum delay for applying changes during recovery

# - Subscribers -

# These settings are ignored on a publisher.

#max_logical_replication_workers = 4 # taken from max_worker_processes
# (change requires restart)
#max_sync_workers_per_subscription = 2 # taken from max_logical_replication_workers


#------------------------------------------------------------------------------
# QUERY TUNING
#------------------------------------------------------------------------------

# - Planner Method Configuration -

#enable_async_append = on
#enable_bitmapscan = on
#enable_gathermerge = on
#enable_hashagg = on
#enable_hashjoin = on
#enable_incremental_sort = on
#enable_indexscan = on
#enable_indexonlyscan = on
#enable_material = on
#enable_memoize = on
#enable_mergejoin = on
#enable_nestloop = on
#enable_parallel_append = on
#enable_parallel_hash = on
#enable_partition_pruning = on
#enable_partitionwise_join = off
#enable_partitionwise_aggregate = off
#enable_seqscan = on
#enable_sort = on
#enable_tidscan = on

# - Planner Cost Constants -

#seq_page_cost = 1.0 # measured on an arbitrary scale
#random_page_cost = 4.0 # same scale as above
#cpu_tuple_cost = 0.01 # same scale as above
#cpu_index_tuple_cost = 0.005 # same scale as above
#cpu_operator_cost = 0.0025 # same scale as above
#parallel_setup_cost = 1000.0 # same scale as above
#parallel_tuple_cost = 0.1 # same scale as above
#min_parallel_table_scan_size = 8MB
#min_parallel_index_scan_size = 512kB
#effective_cache_size = 4GB

#jit_above_cost = 100000 # perform JIT compilation if available
# and query more expensive than this;
# -1 disables
#jit_inline_above_cost = 500000 # inline small functions if query is
# more expensive than this; -1 disables
#jit_optimize_above_cost = 500000 # use expensive JIT optimizations if
# query is more expensive than this;
# -1 disables

# - Genetic Query Optimizer -

#geqo = on
#geqo_threshold = 12
#geqo_effort = 5 # range 1-10
#geqo_pool_size = 0 # selects default based on effort
#geqo_generations = 0 # selects default based on effort
#geqo_selection_bias = 2.0 # range 1.5-2.0
#geqo_seed = 0.0 # range 0.0-1.0

# - Other Planner Options -

#default_statistics_target = 100 # range 1-10000
#constraint_exclusion = partition # on, off, or partition
#cursor_tuple_fraction = 0.1 # range 0.0-1.0
#from_collapse_limit = 8
#jit = on # allow JIT compilation
#join_collapse_limit = 8 # 1 disables collapsing of explicit
# JOIN clauses
#plan_cache_mode = auto # auto, force_generic_plan or
# force_custom_plan
#recursive_worktable_factor = 10.0 # range 0.001-1000000


#------------------------------------------------------------------------------
# REPORTING AND LOGGING
#------------------------------------------------------------------------------

# - Where to Log -

#log_destination = 'stderr' # Valid values are combinations of
# stderr, csvlog, jsonlog, syslog, and
# eventlog, depending on platform.
# csvlog and jsonlog require
# logging_collector to be on.

# This is used when logging to stderr:
#logging_collector = off # Enable capturing of stderr, jsonlog,
# and csvlog into log files. Required
# to be on for csvlogs and jsonlogs.
# (change requires restart)

# These are only used if logging_collector is on:
#log_directory = 'log' # directory where log files are written,
# can be absolute or relative to PGDATA
#log_filename = 'postgresql-%Y-%m-%d_%H%M%S.log' # log file name pattern,
# can include strftime() escapes
#log_file_mode = 0600 # creation mode for log files,
# begin with 0 to use octal notation
#log_rotation_age = 1d # Automatic rotation of logfiles will
# happen after that time. 0 disables.
#log_rotation_size = 10MB # Automatic rotation of logfiles will
# happen after that much log output.
# 0 disables.
#log_truncate_on_rotation = off # If on, an existing log file with the
# same name as the new log file will be
# truncated rather than appended to.
# But such truncation only occurs on
# time-driven rotation, not on restarts
# or size-driven rotation. Default is
# off, meaning append to existing files
# in all cases.

# These are relevant when logging to syslog:
#syslog_facility = 'LOCAL0'
#syslog_ident = 'postgres'
#syslog_sequence_numbers = on
#syslog_split_messages = on

# This is only relevant when logging to eventlog (Windows):
# (change requires restart)
#event_source = 'PostgreSQL'

# - When to Log -

#log_min_messages = warning # values in order of decreasing detail:
#   debug5
#   debug4
#   debug3
#   debug2
#   debug1
#   info
#   notice
#   warning
#   error
#   log
#   fatal
#   panic

#log_min_error_statement = error # values in order of decreasing detail:
#   debug5
#   debug4
#   debug3
#   debug2
#   debug1
#   info
#   notice
#   warning
#   error
#   log
#   fatal
#   panic (effectively off)

#log_min_duration_statement = -1 # -1 is disabled, 0 logs all statements
# and their durations, > 0 logs only
# statements running at least this number
# of milliseconds

#log_min_duration_sample = -1 # -1 is disabled, 0 logs a sample of statements
# and their durations, > 0 logs only a sample of
# statements running at least this number
# of milliseconds;
# sample fraction is determined by log_statement_sample_rate

#log_statement_sample_rate = 1.0 # fraction of logged statements exceeding
# log_min_duration_sample to be logged;
# 1.0 logs all such statements, 0.0 never logs


#log_transaction_sample_rate = 0.0 # fraction of transactions whose statements
# are logged regardless of their duration; 1.0 logs all
# statements from all transactions, 0.0 never logs

#log_startup_progress_interval = 10s # Time between progress updates for
# long-running startup operations.
# 0 disables the feature, > 0 indicates
# the interval in milliseconds.

# - What to Log -

#debug_print_parse = off
#debug_print_rewritten = off
#debug_print_plan = off
#debug_pretty_print = on
#log_autovacuum_min_duration = 10min # log autovacuum activity;
# -1 disables, 0 logs all actions and
# their durations, > 0 logs only
# actions running at least this number
# of milliseconds.
#log_checkpoints = on
#log_connections = off
#log_disconnections = off
#log_duration = off
#log_error_verbosity = default # terse, default, or verbose messages
#log_hostname = off
log_line_prefix = '%m [%p] %q%u@%d ' # special values:
#   %a = application name
#   %u = user name
#   %d = database name
#   %r = remote host and port
#   %h = remote host
#   %b = backend type
#   %p = process ID
#   %P = process ID of parallel group leader
#   %t = timestamp without milliseconds
#   %m = timestamp with milliseconds
#   %n = timestamp with milliseconds (as a Unix epoch)
#   %Q = query ID (0 if none or not computed)
#   %i = command tag
#   %e = SQL state
#   %c = session ID
#   %l = session line number
#   %s = session start timestamp
#   %v = virtual transaction ID
#   %x = transaction ID (0 if none)
#   %q = stop here in non-session
#        processes
#   %% = '%'
# e.g. '<%u%%%d> '
#log_lock_waits = off # log lock waits >= deadlock_timeout
#log_recovery_conflict_waits = off # log standby recovery conflict waits
# >= deadlock_timeout
#log_parameter_max_length = -1 # when logging statements, limit logged
# bind-parameter values to N bytes;
# -1 means print in full, 0 disables
#log_parameter_max_length_on_error = 0 # when logging an error, limit logged
# bind-parameter values to N bytes;
# -1 means print in full, 0 disables
#log_statement = 'none' # none, ddl, mod, all
#log_replication_commands = off
#log_temp_files = -1 # log temporary files equal or larger
# than the specified size in kilobytes;
# -1 disables, 0 logs all temp files
log_timezone = 'Etc/UTC'


#------------------------------------------------------------------------------
# PROCESS TITLE
#------------------------------------------------------------------------------

cluster_name = '15/main' # added to process titles if nonempty
# (change requires restart)
#update_process_title = on


#------------------------------------------------------------------------------
# STATISTICS
#------------------------------------------------------------------------------

# - Cumulative Query and Index Statistics -

#track_activities = on
#track_activity_query_size = 1024 # (change requires restart)
#track_counts = on
#track_io_timing = off
#track_wal_io_timing = off
#track_functions = none # none, pl, all
#stats_fetch_consistency = cache


# - Monitoring -

#compute_query_id = auto
#log_statement_stats = off
#log_parser_stats = off
#log_planner_stats = off
#log_executor_stats = off


#------------------------------------------------------------------------------
# AUTOVACUUM
#------------------------------------------------------------------------------

#autovacuum = on # Enable autovacuum subprocess?  'on'
# requires track_counts to also be on.
#autovacuum_max_workers = 3 # max number of autovacuum subprocesses
# (change requires restart)
#autovacuum_naptime = 1min # time between autovacuum runs
#autovacuum_vacuum_threshold = 50 # min number of row updates before
# vacuum
#autovacuum_vacuum_insert_threshold = 1000 # min number of row inserts
# before vacuum; -1 disables insert
# vacuums
#autovacuum_analyze_threshold = 50 # min number of row updates before
# analyze
#autovacuum_vacuum_scale_factor = 0.2 # fraction of table size before vacuum
#autovacuum_vacuum_insert_scale_factor = 0.2 # fraction of inserts over table
# size before insert vacuum
#autovacuum_analyze_scale_factor = 0.1 # fraction of table size before analyze
#autovacuum_freeze_max_age = 200000000 # maximum XID age before forced vacuum
# (change requires restart)
#autovacuum_multixact_freeze_max_age = 400000000 # maximum multixact age
# before forced vacuum
# (change requires restart)
#autovacuum_vacuum_cost_delay = 2ms # default vacuum cost delay for
# autovacuum, in milliseconds;
# -1 means use vacuum_cost_delay
#autovacuum_vacuum_cost_limit = -1 # default vacuum cost limit for
# autovacuum, -1 means use
# vacuum_cost_limit


#------------------------------------------------------------------------------
# CLIENT CONNECTION DEFAULTS
#------------------------------------------------------------------------------

# - Statement Behavior -

#client_min_messages = notice # values in order of decreasing detail:
#   debug5
#   debug4
#   debug3
#   debug2
#   debug1
#   log
#   notice
#   warning
#   error
#search_path = '"$user", public' # schema names
#row_security = on
#default_table_access_method = 'heap'
#default_tablespace = '' # a tablespace name, '' uses the default
#default_toast_compression = 'pglz' # 'pglz' or 'lz4'
#temp_tablespaces = '' # a list of tablespace names, '' uses
# only default tablespace
#check_function_bodies = on
#default_transaction_isolation = 'read committed'
#default_transaction_read_only = off
#default_transaction_deferrable = off
#session_replication_role = 'origin'
#statement_timeout = 0 # in milliseconds, 0 is disabled
#lock_timeout = 0 # in milliseconds, 0 is disabled
#idle_in_transaction_session_timeout = 0 # in milliseconds, 0 is disabled
#idle_session_timeout = 0 # in milliseconds, 0 is disabled
#vacuum_freeze_table_age = 150000000
#vacuum_freeze_min_age = 50000000
#vacuum_failsafe_age = 1600000000
#vacuum_multixact_freeze_table_age = 150000000
#vacuum_multixact_freeze_min_age = 5000000
#vacuum_multixact_failsafe_age = 1600000000
#bytea_output = 'hex' # hex, escape
#xmlbinary = 'base64'
#xmloption = 'content'
#gin_pending_list_limit = 4MB

# - Locale and Formatting -

datestyle = 'iso, mdy'
#intervalstyle = 'postgres'
timezone = 'Etc/UTC'
#timezone_abbreviations = 'Default' # Select the set of available time zone
# abbreviations.  Currently, there are
#   Default
#   Australia (historical usage)
#   India
# You can create your own file in
# share/timezonesets/.
#extra_float_digits = 1 # min -15, max 3; any value >0 actually
# selects precise output mode
#client_encoding = sql_ascii # actually, defaults to database
# encoding

# These settings are initialized by initdb, but they can be changed.
lc_messages = 'C.UTF-8' # locale for system error message
# strings
lc_monetary = 'C.UTF-8' # locale for monetary formatting
lc_numeric = 'C.UTF-8' # locale for number formatting
lc_time = 'C.UTF-8' # locale for time formatting

# default configuration for text search
default_text_search_config = 'pg_catalog.english'

# - Shared Library Preloading -

#local_preload_libraries = ''
#session_preload_libraries = ''
#shared_preload_libraries = '' # (change requires restart)
#jit_provider = 'llvmjit' # JIT library to use

# - Other Defaults -

#dynamic_library_path = '$libdir'
#extension_destdir = '' # prepend path when loading extensions
# and shared objects (added by Debian)
#gin_fuzzy_search_limit = 0


#------------------------------------------------------------------------------
# LOCK MANAGEMENT
#------------------------------------------------------------------------------

#deadlock_timeout = 1s
#max_locks_per_transaction = 64 # min 10
# (change requires restart)
#max_pred_locks_per_transaction = 64 # min 10
# (change requires restart)
#max_pred_locks_per_relation = -2 # negative values mean
# (max_pred_locks_per_transaction
# / -max_pred_locks_per_relation) - 1
#max_pred_locks_per_page = 2 # min 0


#------------------------------------------------------------------------------
# VERSION AND PLATFORM COMPATIBILITY
#------------------------------------------------------------------------------

# - Previous PostgreSQL Versions -

#array_nulls = on
#backslash_quote = safe_encoding # on, off, or safe_encoding
#escape_string_warning = on
#lo_compat_privileges = off
#quote_all_identifiers = off
#standard_conforming_strings = on
#synchronize_seqscans = on

# - Other Platforms and Clients -

#transform_null_equals = off


#------------------------------------------------------------------------------
# ERROR HANDLING
#------------------------------------------------------------------------------

#exit_on_error = off # terminate session on any error?
#restart_after_crash = on # reinitialize after backend crash?
#data_sync_retry = off # retry or panic on failure to fsync
# data?
# (change requires restart)
#recovery_init_sync_method = fsync # fsync, syncfs (Linux 5.8+)


#------------------------------------------------------------------------------
# CONFIG FILE INCLUDES
#------------------------------------------------------------------------------

# These options allow settings to be loaded from files other than the
# default postgresql.conf.  Note that these are directives, not variable
# assignments, so they can usefully be given more than once.

# include_dir = 'conf.d' # include files ending in '.conf' from
# a directory, e.g., 'conf.d'
#include_if_exists = '...' # include file only if it exists
#include = '...' # include file


#------------------------------------------------------------------------------
# CUSTOMIZED OPTIONS
#------------------------------------------------------------------------------

# Add settings for extensions here

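Note that several parameters appear twice in this file (listen_addresses, max_connections, shared_buffers); PostgreSQL applies the last occurrence, so the later values win. A minimal sketch for checking the effective value and applying edits without a restart, per the reload instructions in the file's own header — psycopg2 and the superuser DSN are assumptions:

    # Sketch: verify which duplicate setting took effect, then reload the config.
    import psycopg2

    conn = psycopg2.connect("host=localhost user=postgres")
    conn.autocommit = True
    with conn.cursor() as cur:
        cur.execute("SHOW max_connections;")     # expect '200': the later entry wins
        print(cur.fetchone()[0])
        cur.execute("SELECT pg_reload_conf();")  # equivalent of "pg_ctl reload"
    conn.close()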
71  aio/supervisord-app  Normal file
@@ -0,0 +1,71 @@

[program:web]
command=node /app/web/web/server.js
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
environment=PORT=3001,HOSTNAME=0.0.0.0

[program:space]
command=node /app/space/space/server.js
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
environment=PORT=3002,HOSTNAME=0.0.0.0

[program:admin]
command=node /app/admin/admin/server.js
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
environment=PORT=3003,HOSTNAME=0.0.0.0

[program:migrator]
directory=/app/api
command=sh -c "./bin/docker-entrypoint-migrator.sh"
autostart=true
autorestart=false
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0

[program:api]
directory=/app/api
command=sh -c "./bin/docker-entrypoint-api.sh"
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0

[program:worker]
directory=/app/api
command=sh -c "./bin/docker-entrypoint-worker.sh"
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0

[program:beat]
directory=/app/api
command=sh -c "./bin/docker-entrypoint-beat.sh"
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0

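A quick way to sanity-check the three Node services this file supervises is to probe the ports it assigns; localhost and the root path are assumptions here, not part of the config:

    # Sketch: smoke-test the web/space/admin processes on the ports assigned above.
    import urllib.request

    for name, port in (("web", 3001), ("space", 3002), ("admin", 3003)):
        try:
            with urllib.request.urlopen(f"http://localhost:{port}/", timeout=2) as resp:
                print(name, port, resp.status)
        except OSError as exc:  # connection refused while supervisord restarts a program
            print(name, port, "unreachable:", exc)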
@@ -2,7 +2,7 @@
user=root
nodaemon=true
stderr_logfile=/app/logs/error/supervisor.err.log
stdout_logfile=/app/logs/access/supervisor.out.log
stdout_logfile=/app/logs/access/supervisor.log

[program:redis]
directory=/app/data/redis
@@ -10,15 +10,15 @@ command=redis-server
autostart=true
autorestart=true
stderr_logfile=/app/logs/error/redis.err.log
stdout_logfile=/app/logs/access/redis.out.log
stdout_logfile=/app/logs/access/redis.log

[program:postgresql]
user=postgres
command=/usr/lib/postgresql/15/bin/postgres --config-file=/etc/postgresql/15/main/postgresql.conf
command=/usr/lib/postgresql/15/bin/postgres --config-file=/etc/postgresql/postgresql.conf
autostart=true
autorestart=true
stderr_logfile=/app/logs/error/postgresql.err.log
stdout_logfile=/app/logs/access/postgresql.out.log
stdout_logfile=/app/logs/access/postgresql.log

[program:minio]
directory=/app/data/minio
@@ -26,12 +26,13 @@ command=minio server /app/data/minio
autostart=true
autorestart=true
stderr_logfile=/app/logs/error/minio.err.log
stdout_logfile=/app/logs/access/minio.out.log
stdout_logfile=/app/logs/access/minio.log

[program:nginx]
directory=/app/data/nginx
command=/usr/sbin/nginx -g 'daemon off;'
command=/app/nginx-start.sh
autostart=true
autorestart=true
stderr_logfile=/app/logs/error/nginx.err.log
stdout_logfile=/app/logs/access/nginx.out.log
stdout_logfile=/app/logs/access/nginx.log

14  aio/supervisord-slim-base  Normal file
@@ -0,0 +1,14 @@
[supervisord]
user=root
nodaemon=true
stderr_logfile=/app/logs/error/supervisor.err.log
stdout_logfile=/app/logs/access/supervisor.log

[program:nginx]
directory=/app/data/nginx
command=/app/nginx-start.sh
autostart=true
autorestart=true
stderr_logfile=/app/logs/error/nginx.err.log
stdout_logfile=/app/logs/access/nginx.log

@@ -1,115 +0,0 @@
[supervisord]
user=root
nodaemon=true
priority=1
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0

[program:redis]
directory=/app/data/redis
command=redis-server
autostart=true
autorestart=true
priority=1
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0

[program:postgresql]
user=postgres
command=/usr/lib/postgresql/15/bin/postgres -D /var/lib/postgresql/data --config-file=/etc/postgresql/postgresql.conf
autostart=true
autorestart=true
priority=1
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0

[program:minio]
directory=/app/data/minio
command=minio server /app/data/minio
autostart=true
autorestart=true
priority=1
stdout_logfile=/app/logs/access/minio.log
stderr_logfile=/app/logs/error/minio.err.log

[program:nginx]
command=/app/nginx-start.sh
autostart=true
autorestart=true
priority=1
stdout_logfile=/app/logs/access/nginx.log
stderr_logfile=/app/logs/error/nginx.err.log


[program:web]
command=/app/aio.sh web
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
environment=PORT=3001,HOSTNAME=0.0.0.0

[program:space]
command=/app/aio.sh space
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
environment=PORT=3002,HOSTNAME=0.0.0.0

[program:admin]
command=/app/aio.sh admin
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
environment=PORT=3003,HOSTNAME=0.0.0.0

[program:migrator]
command=/app/aio.sh migrator
autostart=true
autorestart=false
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0

[program:api]
command=/app/aio.sh api
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0

[program:worker]
command=/app/aio.sh worker
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0

[program:beat]
command=/app/aio.sh beat
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0

@@ -15,12 +15,18 @@ POSTGRES_DB="plane"
POSTGRES_PORT=5432
DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}


# Redis Settings
REDIS_HOST="plane-redis"
REDIS_PORT="6379"
REDIS_URL="redis://${REDIS_HOST}:6379/"

# RabbitMQ Settings
RABBITMQ_HOST="plane-mq"
RABBITMQ_PORT="5672"
RABBITMQ_USER="plane"
RABBITMQ_PASSWORD="plane"
RABBITMQ_VHOST="plane"

# AWS Settings
AWS_REGION=""
AWS_ACCESS_KEY_ID="access-key"
@@ -50,3 +56,6 @@ GUNICORN_WORKERS=2
ADMIN_BASE_URL=
SPACE_BASE_URL=
APP_BASE_URL=

# Hard delete files after days
HARD_DELETE_AFTER_DAYS=60

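For reference, the composed DATABASE_URL expands as sketched below; the user, password, and host defaults live above this hunk and are assumed here. Note that REDIS_URL inlines 6379 rather than reusing ${REDIS_PORT}.

    # Sketch: the interpolated DATABASE_URL (user/password/host values assumed).
    user, password, host, port, db = "plane", "plane", "plane-db", 5432, "plane"
    print(f"postgresql://{user}:{password}@{host}:{port}/{db}")
    # -> postgresql://plane:plane@plane-db:5432/plane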
@@ -1,4 +1,4 @@
FROM python:3.11.1-alpine3.17 AS backend
FROM python:3.12.5-alpine AS backend

# set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
@@ -7,23 +7,23 @@ ENV PIP_DISABLE_PIP_VERSION_CHECK=1

WORKDIR /code

RUN apk --no-cache add \
    "libpq~=15" \
    "libxslt~=1.1" \
    "nodejs-current~=19" \
    "xmlsec~=1.2"
RUN apk add --no-cache \
    "libpq" \
    "libxslt" \
    "nodejs-current" \
    "xmlsec"

COPY requirements.txt ./
COPY requirements ./requirements
RUN apk add --no-cache libffi-dev
RUN apk add --no-cache --virtual .build-deps \
    "bash~=5.2" \
    "g++~=12.2" \
    "gcc~=12.2" \
    "cargo~=1.64" \
    "git~=2" \
    "make~=4.3" \
    "postgresql13-dev~=13" \
    "g++" \
    "gcc" \
    "cargo" \
    "git" \
    "make" \
    "postgresql-dev" \
    "libc-dev" \
    "linux-headers" \
    && \

@@ -1,4 +1,4 @@
FROM python:3.11.1-alpine3.17 AS backend
FROM python:3.12.5-alpine AS backend

# set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
@@ -7,18 +7,18 @@ ENV PIP_DISABLE_PIP_VERSION_CHECK=1

RUN apk --no-cache add \
    "bash~=5.2" \
    "libpq~=15" \
    "libxslt~=1.1" \
    "nodejs-current~=19" \
    "xmlsec~=1.2" \
    "libpq" \
    "libxslt" \
    "nodejs-current" \
    "xmlsec" \
    "libffi-dev" \
    "bash~=5.2" \
    "g++~=12.2" \
    "gcc~=12.2" \
    "cargo~=1.64" \
    "git~=2" \
    "make~=4.3" \
    "postgresql13-dev~=13" \
    "g++" \
    "gcc" \
    "cargo" \
    "git" \
    "make" \
    "postgresql-dev" \
    "libc-dev" \
    "linux-headers"


@@ -32,4 +32,3 @@ python manage.py create_bucket
python manage.py clear_cache

python manage.py runserver 0.0.0.0:8000 --settings=plane.settings.local


@@ -1,4 +1,4 @@
{
  "name": "plane-api",
  "version": "0.21.0"
  "version": "0.22.0"
}

@@ -40,3 +40,44 @@ class ApiKeyRateThrottle(SimpleRateThrottle):
            request.META["X-RateLimit-Reset"] = reset_time

        return allowed


class ServiceTokenRateThrottle(SimpleRateThrottle):
    scope = "service_token"
    rate = "300/minute"

    def get_cache_key(self, request, view):
        # Retrieve the API key from the request header
        api_key = request.headers.get("X-Api-Key")
        if not api_key:
            return None  # Allow the request if there's no API key

        # Use the API key as part of the cache key
        return f"{self.scope}:{api_key}"

    def allow_request(self, request, view):
        allowed = super().allow_request(request, view)

        if allowed:
            now = self.timer()
            # Calculate the remaining limit and reset time
            history = self.cache.get(self.key, [])

            # Remove old histories
            while history and history[-1] <= now - self.duration:
                history.pop()

            # Calculate the requests
            num_requests = len(history)

            # Check available requests
            available = self.num_requests - num_requests

            # Unix timestamp for when the rate limit will reset
            reset_time = int(now + self.duration)

            # Add headers
            request.META["X-RateLimit-Remaining"] = max(0, available)
            request.META["X-RateLimit-Reset"] = reset_time

        return allowed
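For reference, a throttle like this is attached either per-view via throttle_classes (as the base view further below does) or globally; the settings sketch here is standard DRF wiring, but the dotted module path is an assumption:

    # settings.py sketch — DEFAULT_THROTTLE_CLASSES is standard DRF; the path is assumed.
    REST_FRAMEWORK = {
        "DEFAULT_THROTTLE_CLASSES": [
            "plane.api.rate_limit.ServiceTokenRateThrottle",
        ],
    }
    # SimpleRateThrottle parses rate = "300/minute" into num_requests=300 and
    # duration=60.0 seconds, which is what allow_request() consults above.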
@@ -10,6 +10,7 @@ from .issue import (
    IssueAttachmentSerializer,
    IssueActivitySerializer,
    IssueExpandSerializer,
    IssueLiteSerializer,
)
from .state import StateLiteSerializer, StateSerializer
from .cycle import CycleSerializer, CycleIssueSerializer, CycleLiteSerializer

@@ -67,6 +67,7 @@ class BaseSerializer(serializers.ModelSerializer):
        # Import all the expandable serializers
        from . import (
            IssueSerializer,
            IssueLiteSerializer,
            ProjectLiteSerializer,
            StateLiteSerializer,
            UserLiteSerializer,
@@ -86,6 +87,7 @@ class BaseSerializer(serializers.ModelSerializer):
            "actor": UserLiteSerializer,
            "owned_by": UserLiteSerializer,
            "members": UserLiteSerializer,
            "parent": IssueLiteSerializer,
        }
        # Check if field in expansion then expand the field
        if expand in expansion:

@@ -13,9 +13,9 @@ class CycleSerializer(BaseSerializer):
    started_issues = serializers.IntegerField(read_only=True)
    unstarted_issues = serializers.IntegerField(read_only=True)
    backlog_issues = serializers.IntegerField(read_only=True)
    total_estimates = serializers.IntegerField(read_only=True)
    completed_estimates = serializers.IntegerField(read_only=True)
    started_estimates = serializers.IntegerField(read_only=True)
    total_estimates = serializers.FloatField(read_only=True)
    completed_estimates = serializers.FloatField(read_only=True)
    started_estimates = serializers.FloatField(read_only=True)

    def validate(self, data):
        if (
@@ -40,6 +40,7 @@ class CycleSerializer(BaseSerializer):
            "workspace",
            "project",
            "owned_by",
            "deleted_at",
        ]



@@ -1,6 +1,3 @@
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator

# Django imports
from django.utils import timezone
from lxml import html
@@ -11,6 +8,7 @@ from rest_framework import serializers
# Module imports
from plane.db.models import (
    Issue,
    IssueType,
    IssueActivity,
    IssueAssignee,
    IssueAttachment,
@@ -29,6 +27,9 @@ from .module import ModuleLiteSerializer, ModuleSerializer
from .state import StateLiteSerializer
from .user import UserLiteSerializer

# Django imports
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator

class IssueSerializer(BaseSerializer):
    assignees = serializers.ListField(
@@ -46,6 +47,12 @@ class IssueSerializer(BaseSerializer):
        write_only=True,
        required=False,
    )
    type_id = serializers.PrimaryKeyRelatedField(
        source="type",
        queryset=IssueType.objects.all(),
        required=False,
        allow_null=True,
    )

    class Meta:
        model = Issue
@@ -53,9 +60,7 @@ class IssueSerializer(BaseSerializer):
            "id",
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]
        exclude = [
@@ -131,7 +136,20 @@ class IssueSerializer(BaseSerializer):
        workspace_id = self.context["workspace_id"]
        default_assignee_id = self.context["default_assignee_id"]

        issue = Issue.objects.create(**validated_data, project_id=project_id)
        issue_type = validated_data.pop("type", None)

        if not issue_type:
            # Get default issue type
            issue_type = IssueType.objects.filter(
                project_issue_types__project_id=project_id, is_default=True
            ).first()
            issue_type = issue_type

        issue = Issue.objects.create(
            **validated_data,
            project_id=project_id,
            type=issue_type,
        )

        # Issue Audit Users
        created_by_id = issue.created_by_id
@@ -256,6 +274,17 @@ class IssueSerializer(BaseSerializer):
        return data


class IssueLiteSerializer(BaseSerializer):
    class Meta:
        model = Issue
        fields = [
            "id",
            "sequence_id",
            "project_id",
        ]
        read_only_fields = fields


class LabelSerializer(BaseSerializer):
    class Meta:
        model = Label
@@ -268,6 +297,7 @@ class LabelSerializer(BaseSerializer):
            "updated_by",
            "created_at",
            "updated_at",
            "deleted_at",
        ]


@@ -285,7 +315,7 @@ class IssueLinkSerializer(BaseSerializer):
            "created_at",
            "updated_at",
        ]


    def validate_url(self, value):
        # Check URL format
        validate_url = URLValidator()
@@ -312,10 +342,14 @@ class IssueLinkSerializer(BaseSerializer):
        return IssueLink.objects.create(**validated_data)

    def update(self, instance, validated_data):
        if IssueLink.objects.filter(
            url=validated_data.get("url"),
            issue_id=instance.issue_id,
        ).exclude(pk=instance.id).exists():
        if (
            IssueLink.objects.filter(
                url=validated_data.get("url"),
                issue_id=instance.issue_id,
            )
            .exclude(pk=instance.id)
            .exists()
        ):
            raise serializers.ValidationError(
                {"error": "URL already exists for this Issue"}
            )
@@ -332,9 +366,7 @@ class IssueAttachmentSerializer(BaseSerializer):
            "workspace",
            "project",
            "issue",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]


@@ -39,6 +39,7 @@ class ModuleSerializer(BaseSerializer):
            "updated_by",
            "created_at",
            "updated_at",
            "deleted_at",
        ]

    def to_representation(self, instance):

@@ -31,6 +31,7 @@ class ProjectSerializer(BaseSerializer):
            "updated_at",
            "created_by",
            "updated_by",
            "deleted_at",
        ]

    def validate(self, data):

@@ -23,6 +23,7 @@ class StateSerializer(BaseSerializer):
            "updated_at",
            "workspace",
            "project",
            "deleted_at",
        ]



@@ -4,6 +4,7 @@ from .issue import urlpatterns as issue_patterns
from .cycle import urlpatterns as cycle_patterns
from .module import urlpatterns as module_patterns
from .inbox import urlpatterns as inbox_patterns
from .member import urlpatterns as member_patterns

urlpatterns = [
    *project_patterns,
@@ -12,4 +13,5 @@ urlpatterns = [
    *cycle_patterns,
    *module_patterns,
    *inbox_patterns,
    *member_patterns,
]

@@ -7,6 +7,7 @@ from plane.api.views import (
    IssueCommentAPIEndpoint,
    IssueActivityAPIEndpoint,
    WorkspaceIssueAPIEndpoint,
    IssueAttachmentEndpoint,
)

urlpatterns = [
@@ -65,4 +66,9 @@ urlpatterns = [
        IssueActivityAPIEndpoint.as_view(),
        name="activity",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/",
        IssueAttachmentEndpoint.as_view(),
        name="attachment",
    ),
]

13  apiserver/plane/api/urls/member.py  Normal file
@@ -0,0 +1,13 @@
from django.urls import path

from plane.api.views import (
    ProjectMemberAPIEndpoint,
)

urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<str:project_id>/members/",
        ProjectMemberAPIEndpoint.as_view(),
        name="users",
    ),
]
@@ -9,6 +9,7 @@ from .issue import (
    IssueLinkAPIEndpoint,
    IssueCommentAPIEndpoint,
    IssueActivityAPIEndpoint,
    IssueAttachmentEndpoint,
)

from .cycle import (
@@ -24,4 +25,7 @@ from .module import (
    ModuleArchiveUnarchiveAPIEndpoint,
)

from .member import ProjectMemberAPIEndpoint

from .inbox import InboxIssueAPIEndpoint


@@ -7,6 +7,7 @@ from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db import IntegrityError
from django.urls import resolve
from django.utils import timezone
from plane.db.models.api import APIToken
from rest_framework import status
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
@@ -16,7 +17,7 @@ from rest_framework.views import APIView

# Module imports
from plane.api.middleware.api_authentication import APIKeyAuthentication
from plane.api.rate_limit import ApiKeyRateThrottle
from plane.api.rate_limit import ApiKeyRateThrottle, ServiceTokenRateThrottle
from plane.utils.exception_logger import log_exception
from plane.utils.paginator import BasePaginator

@@ -44,15 +45,29 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
        IsAuthenticated,
    ]

    throttle_classes = [
        ApiKeyRateThrottle,
    ]

    def filter_queryset(self, queryset):
        for backend in list(self.filter_backends):
            queryset = backend().filter_queryset(self.request, queryset, self)
        return queryset

    def get_throttles(self):
        throttle_classes = []
        api_key = self.request.headers.get("X-Api-Key")

        if api_key:
            service_token = APIToken.objects.filter(
                token=api_key,
                is_service=True,
            ).first()

            if service_token:
                throttle_classes.append(ServiceTokenRateThrottle())
                return throttle_classes

        throttle_classes.append(ApiKeyRateThrottle())

        return throttle_classes

    def handle_exception(self, exc):
        """
        Handle any exception that occurs, by returning an appropriate response,
@@ -152,4 +167,4 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
            for expand in self.request.GET.get("expand", "").split(",")
            if expand
        ]
        return expand if expand else None
        return expand if expand else None
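DRF calls get_throttles() on every request, so returning already-instantiated throttles as above is valid; which limiter applies is decided per request by the X-Api-Key header. A client-side sketch — the base URL and token are placeholders, and `requests` is an assumption rather than a stated project dependency:

    # Sketch: exercising the per-key throttle from a client.
    import requests

    resp = requests.get(
        "https://plane.example.com/api/v1/workspaces/acme/projects/",
        headers={"X-Api-Key": "<api-token>"},
    )
    print(resp.status_code)  # 429 once the key's per-minute budget is exhausted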
@@ -3,9 +3,18 @@ import json

# Django imports
from django.core import serializers
from django.db.models import Count, F, Func, OuterRef, Q, Sum
from django.utils import timezone
from django.core.serializers.json import DjangoJSONEncoder
from django.db.models import (
    Count,
    F,
    Func,
    OuterRef,
    Q,
    Sum,
    FloatField,
)
from django.db.models.functions import Cast

# Third party imports
from rest_framework import status
@@ -17,13 +26,16 @@ from plane.api.serializers import (
    CycleSerializer,
)
from plane.app.permissions import ProjectEntityPermission
from plane.bgtasks.issue_activites_task import issue_activity
from plane.bgtasks.issue_activities_task import issue_activity
from plane.db.models import (
    Cycle,
    CycleIssue,
    Issue,
    Project,
    IssueAttachment,
    IssueLink,
    ProjectMember,
    UserFavorite,
)
from plane.utils.analytics_plot import burndown_plot

@@ -115,29 +127,6 @@ class CycleAPIEndpoint(BaseAPIView):
                ),
            )
        )
        .annotate(
            total_estimates=Sum("issue_cycle__issue__estimate_point")
        )
        .annotate(
            completed_estimates=Sum(
                "issue_cycle__issue__estimate_point",
                filter=Q(
                    issue_cycle__issue__state__group="completed",
                    issue_cycle__issue__archived_at__isnull=True,
                    issue_cycle__issue__is_draft=False,
                ),
            )
        )
        .annotate(
            started_estimates=Sum(
                "issue_cycle__issue__estimate_point",
                filter=Q(
                    issue_cycle__issue__state__group="started",
                    issue_cycle__issue__archived_at__isnull=True,
                    issue_cycle__issue__is_draft=False,
                ),
            )
        )
        .order_by(self.kwargs.get("order_by", "-created_at"))
        .distinct()
    )
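The Cast/FloatField imports added earlier pair with the serializer change from IntegerField to FloatField: the integer Sum annotations removed above are presumably replaced by a float-cast aggregation, roughly as sketched here — the exact expression in the new code is an assumption:

    # Sketch of the float-cast aggregation replacing the integer Sum above.
    from django.db.models import FloatField, Sum
    from django.db.models.functions import Cast

    queryset = Cycle.objects.annotate(
        total_estimates=Sum(
            Cast("issue_cycle__issue__estimate_point", FloatField())
        )
    )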
@@ -376,14 +365,28 @@ class CycleAPIEndpoint(BaseAPIView):
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def delete(self, request, slug, project_id, pk):
        cycle = Cycle.objects.get(
            workspace__slug=slug, project_id=project_id, pk=pk
        )
        if cycle.owned_by_id != request.user.id and (
            not ProjectMember.objects.filter(
                workspace__slug=slug,
                member=request.user,
                role=20,
                project_id=project_id,
                is_active=True,
            ).exists()
        ):
            return Response(
                {"error": "Only admin or creator can delete the cycle"},
                status=status.HTTP_403_FORBIDDEN,
            )

        cycle_issues = list(
            CycleIssue.objects.filter(
                cycle_id=self.kwargs.get("pk")
            ).values_list("issue", flat=True)
        )
        cycle = Cycle.objects.get(
            workspace__slug=slug, project_id=project_id, pk=pk
        )

        issue_activity.delay(
            type="cycle.activity.deleted",
@@ -402,11 +405,20 @@ class CycleAPIEndpoint(BaseAPIView):
|
||||
)
|
||||
# Delete the cycle
|
||||
cycle.delete()
|
||||
# Delete the cycle issues
|
||||
CycleIssue.objects.filter(
|
||||
cycle_id=self.kwargs.get("pk"),
|
||||
).delete()
|
||||
# Delete the user favorite cycle
|
||||
UserFavorite.objects.filter(
|
||||
entity_type="cycle",
|
||||
entity_identifier=pk,
|
||||
project_id=project_id,
|
||||
).delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):
|
||||
|
||||
permission_classes = [
|
||||
ProjectEntityPermission,
|
||||
]
|
||||
@@ -532,6 +544,12 @@ class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):
|
||||
)
|
||||
cycle.archived_at = timezone.now()
|
||||
cycle.save()
|
||||
UserFavorite.objects.filter(
|
||||
entity_type="cycle",
|
||||
entity_identifier=cycle_id,
|
||||
project_id=project_id,
|
||||
workspace__slug=slug,
|
||||
).delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
def delete(self, request, slug, project_id, cycle_id):
|
||||
@@ -660,72 +678,63 @@ class CycleIssueAPIEndpoint(BaseAPIView):
|
||||
workspace__slug=slug, project_id=project_id, pk=cycle_id
|
||||
)
|
||||
|
||||
if (
|
||||
cycle.end_date is not None
|
||||
and cycle.end_date < timezone.now().date()
|
||||
):
|
||||
return Response(
|
||||
# Get all CycleIssues already created
|
||||
cycle_issues = list(
|
||||
CycleIssue.objects.filter(
|
||||
~Q(cycle_id=cycle_id), issue_id__in=issues
|
||||
)
|
||||
)
|
||||
|
||||
existing_issues = [
|
||||
str(cycle_issue.issue_id)
|
||||
for cycle_issue in cycle_issues
|
||||
if str(cycle_issue.issue_id) in issues
|
||||
]
|
||||
new_issues = list(set(issues) - set(existing_issues))
|
||||
|
||||
# New issues to create
|
||||
created_records = CycleIssue.objects.bulk_create(
|
||||
[
|
||||
CycleIssue(
|
||||
project_id=project_id,
|
||||
workspace_id=cycle.workspace_id,
|
||||
cycle_id=cycle_id,
|
||||
issue_id=issue,
|
||||
)
|
||||
for issue in new_issues
|
||||
],
|
||||
ignore_conflicts=True,
|
||||
batch_size=10,
|
||||
)
|
||||
|
||||
# Updated Issues
|
||||
updated_records = []
|
||||
update_cycle_issue_activity = []
|
||||
# Iterate over each cycle_issue in cycle_issues
|
||||
for cycle_issue in cycle_issues:
|
||||
old_cycle_id = cycle_issue.cycle_id
|
||||
# Update the cycle_issue's cycle_id
|
||||
cycle_issue.cycle_id = cycle_id
|
||||
# Add the modified cycle_issue to the records_to_update list
|
||||
updated_records.append(cycle_issue)
|
||||
# Record the update activity
|
||||
update_cycle_issue_activity.append(
|
||||
{
|
||||
"error": "The Cycle has already been completed so no new issues can be added"
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
"old_cycle_id": str(old_cycle_id),
|
||||
"new_cycle_id": str(cycle_id),
|
||||
"issue_id": str(cycle_issue.issue_id),
|
||||
}
|
||||
)
|
||||
|
||||
issues = Issue.objects.filter(
|
||||
pk__in=issues, workspace__slug=slug, project_id=project_id
|
||||
).values_list("id", flat=True)
|
||||
|
||||
# Get all CycleIssues already created
|
||||
cycle_issues = list(CycleIssue.objects.filter(issue_id__in=issues))
|
||||
update_cycle_issue_activity = []
|
||||
record_to_create = []
|
||||
records_to_update = []
|
||||
|
||||
for issue in issues:
|
||||
cycle_issue = [
|
||||
cycle_issue
|
||||
for cycle_issue in cycle_issues
|
||||
if str(cycle_issue.issue_id) in issues
|
||||
]
|
||||
# Update only when cycle changes
|
||||
if len(cycle_issue):
|
||||
if cycle_issue[0].cycle_id != cycle_id:
|
||||
update_cycle_issue_activity.append(
|
||||
{
|
||||
"old_cycle_id": str(cycle_issue[0].cycle_id),
|
||||
"new_cycle_id": str(cycle_id),
|
||||
"issue_id": str(cycle_issue[0].issue_id),
|
||||
}
|
||||
)
|
||||
cycle_issue[0].cycle_id = cycle_id
|
||||
records_to_update.append(cycle_issue[0])
|
||||
else:
|
||||
record_to_create.append(
|
||||
CycleIssue(
|
||||
project_id=project_id,
|
||||
workspace=cycle.workspace,
|
||||
created_by=request.user,
|
||||
updated_by=request.user,
|
||||
cycle=cycle,
|
||||
issue_id=issue,
|
||||
)
|
||||
)
|
||||
|
||||
CycleIssue.objects.bulk_create(
|
||||
record_to_create,
|
||||
batch_size=10,
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
# Update the cycle issues
|
||||
CycleIssue.objects.bulk_update(
|
||||
records_to_update,
|
||||
["cycle"],
|
||||
batch_size=10,
|
||||
updated_records, ["cycle_id"], batch_size=100
|
||||
)
|
||||
|
||||
# Capture Issue Activity
|
||||
issue_activity.delay(
|
||||
type="cycle.activity.created",
|
||||
requested_data=json.dumps({"cycles_list": str(issues)}),
|
||||
requested_data=json.dumps({"cycles_list": issues}),
|
||||
actor_id=str(self.request.user.id),
|
||||
issue_id=None,
|
||||
project_id=str(self.kwargs.get("project_id", None)),
|
||||
@@ -733,13 +742,14 @@ class CycleIssueAPIEndpoint(BaseAPIView):
|
||||
{
|
||||
"updated_cycle_issues": update_cycle_issue_activity,
|
||||
"created_cycle_issues": serializers.serialize(
|
||||
"json", record_to_create
|
||||
"json", created_records
|
||||
),
|
||||
}
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
)
|
||||
|
||||
# Return all Cycle Issues
|
||||
return Response(
|
||||
CycleIssueSerializer(self.get_queryset(), many=True).data,
|
||||
@@ -784,7 +794,6 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
|
||||
|
||||
def post(self, request, slug, project_id, cycle_id):
|
||||
new_cycle_id = request.data.get("new_cycle_id", False)
|
||||
plot_type = request.GET.get("plot_type", "issues")
|
||||
|
||||
if not new_cycle_id:
|
||||
return Response(
|
||||
@@ -792,9 +801,9 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
new_cycle = Cycle.objects.get(
|
||||
new_cycle = Cycle.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id, pk=new_cycle_id
|
||||
)
|
||||
).first()
|
||||
|
||||
old_cycle = (
|
||||
Cycle.objects.filter(
|
||||
@@ -861,18 +870,131 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
|
||||
)
|
||||
)
|
||||
|
||||
# Pass the new_cycle queryset to burndown_plot
|
||||
completion_chart = burndown_plot(
|
||||
queryset=old_cycle.first(),
|
||||
slug=slug,
|
||||
project_id=project_id,
|
||||
plot_type=plot_type,
|
||||
cycle_id=cycle_id,
|
||||
)
|
||||
estimate_type = Project.objects.filter(
|
||||
workspace__slug=slug,
|
||||
pk=project_id,
|
||||
estimate__isnull=False,
|
||||
estimate__type="points",
|
||||
).exists()
|
||||
|
||||
if estimate_type:
|
||||
assignee_estimate_data = (
|
||||
Issue.issue_objects.filter(
|
||||
issue_cycle__cycle_id=cycle_id,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
.annotate(display_name=F("assignees__display_name"))
|
||||
.annotate(assignee_id=F("assignees__id"))
|
||||
.annotate(avatar=F("assignees__avatar"))
|
||||
.values("display_name", "assignee_id", "avatar")
|
||||
.annotate(
|
||||
total_estimates=Sum(
|
||||
Cast("estimate_point__value", FloatField())
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
completed_estimates=Sum(
|
||||
Cast("estimate_point__value", FloatField()),
|
||||
filter=Q(
|
||||
completed_at__isnull=False,
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
pending_estimates=Sum(
|
||||
Cast("estimate_point__value", FloatField()),
|
||||
filter=Q(
|
||||
completed_at__isnull=True,
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.order_by("display_name")
|
||||
)
|
||||
# assignee distribution serialization
|
||||
assignee_estimate_distribution = [
|
||||
{
|
||||
"display_name": item["display_name"],
|
||||
"assignee_id": (
|
||||
str(item["assignee_id"])
|
||||
if item["assignee_id"]
|
||||
else None
|
||||
),
|
||||
"avatar": item["avatar"],
|
||||
"total_estimates": item["total_estimates"],
|
||||
"completed_estimates": item["completed_estimates"],
|
||||
"pending_estimates": item["pending_estimates"],
|
||||
}
|
||||
for item in assignee_estimate_data
|
||||
]
|
||||
|
||||
label_distribution_data = (
|
||||
Issue.issue_objects.filter(
|
||||
issue_cycle__cycle_id=cycle_id,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
.annotate(label_name=F("labels__name"))
|
||||
.annotate(color=F("labels__color"))
|
||||
.annotate(label_id=F("labels__id"))
|
||||
.values("label_name", "color", "label_id")
|
||||
.annotate(
|
||||
total_estimates=Sum(
|
||||
Cast("estimate_point__value", FloatField())
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
completed_estimates=Sum(
|
||||
Cast("estimate_point__value", FloatField()),
|
||||
filter=Q(
|
||||
completed_at__isnull=False,
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
pending_estimates=Sum(
|
||||
Cast("estimate_point__value", FloatField()),
|
||||
filter=Q(
|
||||
completed_at__isnull=True,
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.order_by("label_name")
|
||||
)
|
||||
|
||||
estimate_completion_chart = burndown_plot(
|
||||
queryset=old_cycle.first(),
|
||||
slug=slug,
|
||||
project_id=project_id,
|
||||
plot_type="points",
|
||||
cycle_id=cycle_id,
|
||||
)
|
||||
# Label distribution serialization
|
||||
label_estimate_distribution = [
|
||||
{
|
||||
"label_name": item["label_name"],
|
||||
"color": item["color"],
|
||||
"label_id": (
|
||||
str(item["label_id"]) if item["label_id"] else None
|
||||
),
|
||||
"total_estimates": item["total_estimates"],
|
||||
"completed_estimates": item["completed_estimates"],
|
||||
"pending_estimates": item["pending_estimates"],
|
||||
}
|
||||
for item in label_distribution_data
|
||||
]
|
||||
|
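An aside on the pattern above: the estimate aggregations hinge on casting the text-backed estimate_point__value to a float before summing, with a conditional filter splitting completed from pending work. A compressed sketch of the same ORM pattern, reusing field names from the surrounding queryset; it is illustrative, not a drop-in piece of the view:

# A compressed sketch of the aggregation pattern used above: cast the
# text-backed estimate value to a float, then Sum with a conditional filter.
# Field names follow the surrounding queryset; this is not a drop-in view.
from django.db.models import F, FloatField, Q, Sum
from django.db.models.functions import Cast


def estimate_totals_by_assignee(issue_qs):
    return (
        issue_qs.annotate(display_name=F("assignees__display_name"))
        .values("display_name")  # groups the Sums per assignee
        .annotate(
            total_estimates=Sum(Cast("estimate_point__value", FloatField())),
            completed_estimates=Sum(
                Cast("estimate_point__value", FloatField()),
                filter=Q(completed_at__isnull=False, is_draft=False),
            ),
        )
        .order_by("display_name")
    )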
||||
# Get the assignee distribution
|
||||
assignee_distribution = (
|
||||
Issue.objects.filter(
|
||||
Issue.issue_objects.filter(
|
||||
issue_cycle__cycle_id=cycle_id,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
@@ -884,7 +1006,10 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
|
||||
.annotate(
|
||||
total_issues=Count(
|
||||
"id",
|
||||
filter=Q(archived_at__isnull=True, is_draft=False),
|
||||
filter=Q(
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
),
|
||||
)
|
||||
.annotate(
|
||||
@@ -926,7 +1051,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
|
||||
|
||||
# Get the label distribution
|
||||
label_distribution = (
|
||||
Issue.objects.filter(
|
||||
Issue.issue_objects.filter(
|
||||
issue_cycle__cycle_id=cycle_id,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
@@ -938,8 +1063,11 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
|
||||
.annotate(
|
||||
total_issues=Count(
|
||||
"id",
|
||||
filter=Q(archived_at__isnull=True, is_draft=False),
|
||||
)
|
||||
filter=Q(
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
),
|
||||
)
|
||||
.annotate(
|
||||
completed_issues=Count(
|
||||
@@ -979,26 +1107,42 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
|
||||
for item in label_distribution
|
||||
]
|
||||
|
||||
# Pass the new_cycle queryset to burndown_plot
|
||||
completion_chart = burndown_plot(
|
||||
queryset=old_cycle.first(),
|
||||
slug=slug,
|
||||
project_id=project_id,
|
||||
plot_type="issues",
|
||||
cycle_id=cycle_id,
|
||||
)
|
||||
|
||||
current_cycle = Cycle.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id, pk=cycle_id
|
||||
).first()
|
||||
|
||||
if current_cycle:
|
||||
current_cycle.progress_snapshot = {
|
||||
"total_issues": old_cycle.first().total_issues,
|
||||
"completed_issues": old_cycle.first().completed_issues,
|
||||
"cancelled_issues": old_cycle.first().cancelled_issues,
|
||||
"started_issues": old_cycle.first().started_issues,
|
||||
"unstarted_issues": old_cycle.first().unstarted_issues,
|
||||
"backlog_issues": old_cycle.first().backlog_issues,
|
||||
"distribution": {
|
||||
"labels": label_distribution_data,
|
||||
"assignees": assignee_distribution_data,
|
||||
"completion_chart": completion_chart,
|
||||
},
|
||||
}
|
||||
# Save the snapshot of the current cycle
|
||||
current_cycle.save(update_fields=["progress_snapshot"])
|
||||
current_cycle.progress_snapshot = {
|
||||
"total_issues": old_cycle.first().total_issues,
|
||||
"completed_issues": old_cycle.first().completed_issues,
|
||||
"cancelled_issues": old_cycle.first().cancelled_issues,
|
||||
"started_issues": old_cycle.first().started_issues,
|
||||
"unstarted_issues": old_cycle.first().unstarted_issues,
|
||||
"backlog_issues": old_cycle.first().backlog_issues,
|
||||
"distribution": {
|
||||
"labels": label_distribution_data,
|
||||
"assignees": assignee_distribution_data,
|
||||
"completion_chart": completion_chart,
|
||||
},
|
||||
"estimate_distribution": (
|
||||
{}
|
||||
if not estimate_type
|
||||
else {
|
||||
"labels": label_estimate_distribution,
|
||||
"assignees": assignee_estimate_distribution,
|
||||
"completion_chart": estimate_completion_chart,
|
||||
}
|
||||
),
|
||||
}
|
||||
current_cycle.save(update_fields=["progress_snapshot"])
|
||||
|
||||
if (
|
||||
new_cycle.end_date is not None
|
||||
@@ -1019,12 +1163,38 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
|
||||
)
|
||||
|
||||
updated_cycles = []
|
||||
update_cycle_issue_activity = []
|
||||
for cycle_issue in cycle_issues:
|
||||
cycle_issue.cycle_id = new_cycle_id
|
||||
updated_cycles.append(cycle_issue)
|
||||
update_cycle_issue_activity.append(
|
||||
{
|
||||
"old_cycle_id": str(cycle_id),
|
||||
"new_cycle_id": str(new_cycle_id),
|
||||
"issue_id": str(cycle_issue.issue_id),
|
||||
}
|
||||
)
|
||||
|
||||
cycle_issues = CycleIssue.objects.bulk_update(
|
||||
updated_cycles, ["cycle_id"], batch_size=100
|
||||
)
|
||||
|
||||
# Capture Issue Activity
|
||||
issue_activity.delay(
|
||||
type="cycle.activity.created",
|
||||
requested_data=json.dumps({"cycles_list": []}),
|
||||
actor_id=str(self.request.user.id),
|
||||
issue_id=None,
|
||||
project_id=str(self.kwargs.get("project_id", None)),
|
||||
current_instance=json.dumps(
|
||||
{
|
||||
"updated_cycle_issues": update_cycle_issue_activity,
|
||||
"created_cycle_issues": "[]",
|
||||
}
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
)
|
||||
|
||||
return Response({"message": "Success"}, status=status.HTTP_200_OK)
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
# Python imports
|
||||
import json
|
||||
|
||||
# Django improts
|
||||
# Django imports
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
from django.db.models import Q
|
||||
from django.utils import timezone
|
||||
from django.db.models import Q, Value, UUIDField
|
||||
from django.db.models.functions import Coalesce
|
||||
from django.contrib.postgres.aggregates import ArrayAgg
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
|
||||
# Third party imports
|
||||
from rest_framework import status
|
||||
@@ -13,7 +16,7 @@ from rest_framework.response import Response
|
||||
# Module imports
|
||||
from plane.api.serializers import InboxIssueSerializer, IssueSerializer
|
||||
from plane.app.permissions import ProjectLitePermission
|
||||
from plane.bgtasks.issue_activites_task import issue_activity
|
||||
from plane.bgtasks.issue_activities_task import issue_activity
|
||||
from plane.db.models import (
|
||||
Inbox,
|
||||
InboxIssue,
|
||||
@@ -149,7 +152,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):
|
||||
description_html=request.data.get("issue", {}).get(
|
||||
"description_html", "<p></p>"
|
||||
),
|
||||
priority=request.data.get("issue", {}).get("priority", "low"),
|
||||
priority=request.data.get("issue", {}).get("priority", "none"),
|
||||
project_id=project_id,
|
||||
state=state,
|
||||
)
|
||||
@@ -181,13 +184,8 @@ class InboxIssueAPIEndpoint(BaseAPIView):
|
||||
workspace__slug=slug, project_id=project_id
|
||||
).first()
|
||||
|
||||
project = Project.objects.get(
|
||||
workspace__slug=slug,
|
||||
pk=project_id,
|
||||
)
|
||||
|
||||
# Inbox view
|
||||
if inbox is None and not project.inbox_view:
|
||||
if inbox is None:
|
||||
return Response(
|
||||
{
|
||||
"error": "Inbox is not enabled for this project enable it through the project's api"
|
||||
@@ -224,8 +222,27 @@ class InboxIssueAPIEndpoint(BaseAPIView):
|
||||
issue_data = request.data.pop("issue", False)
|
||||
|
||||
if bool(issue_data):
|
||||
issue = Issue.objects.get(
|
||||
pk=issue_id, workspace__slug=slug, project_id=project_id
|
||||
issue = Issue.objects.annotate(
|
||||
label_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"labels__id",
|
||||
distinct=True,
|
||||
filter=~Q(labels__id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
assignee_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"assignees__id",
|
||||
distinct=True,
|
||||
filter=~Q(assignees__id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
).get(
|
||||
pk=issue_id,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
# Only allow guests and viewers to edit name and description
|
||||
if project_member.role <= 10:
|
||||
@@ -368,29 +385,26 @@ class InboxIssueAPIEndpoint(BaseAPIView):
|
||||
inbox_id=inbox.id,
|
||||
)
|
||||
|
||||
# Get the project member
|
||||
project_member = ProjectMember.objects.get(
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
member=request.user,
|
||||
is_active=True,
|
||||
)
|
||||
|
||||
# Check the inbox issue created
|
||||
if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(
|
||||
request.user.id
|
||||
):
|
||||
return Response(
|
||||
{"error": "You cannot delete inbox issue"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Check the issue status
|
||||
if inbox_issue.status in [-2, -1, 0, 2]:
|
||||
# Delete the issue also
|
||||
Issue.objects.filter(
|
||||
issue = Issue.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id, pk=issue_id
|
||||
).delete()
|
||||
).first()
|
||||
if issue.created_by_id != request.user.id and (
|
||||
not ProjectMember.objects.filter(
|
||||
workspace__slug=slug,
|
||||
member=request.user,
|
||||
role=20,
|
||||
project_id=project_id,
|
||||
is_active=True,
|
||||
).exists()
|
||||
):
|
||||
return Response(
|
||||
{"error": "Only admin or creator can delete the issue"},
|
||||
status=status.HTTP_403_FORBIDDEN,
|
||||
)
|
||||
issue.delete()
|
||||
|
||||
inbox_issue.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@@ -22,9 +22,11 @@ from django.utils import timezone
|
||||
# Third party imports
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.parsers import MultiPartParser, FormParser
|
||||
|
||||
# Module imports
|
||||
from plane.api.serializers import (
|
||||
IssueAttachmentSerializer,
|
||||
IssueActivitySerializer,
|
||||
IssueCommentSerializer,
|
||||
IssueLinkSerializer,
|
||||
@@ -36,7 +38,7 @@ from plane.app.permissions import (
|
||||
ProjectLitePermission,
|
||||
ProjectMemberPermission,
|
||||
)
|
||||
from plane.bgtasks.issue_activites_task import issue_activity
|
||||
from plane.bgtasks.issue_activities_task import issue_activity
|
||||
from plane.db.models import (
|
||||
Issue,
|
||||
IssueActivity,
|
||||
@@ -149,6 +151,25 @@ class IssueAPIEndpoint(BaseAPIView):
|
||||
).distinct()
|
||||
|
||||
def get(self, request, slug, project_id, pk=None):
|
||||
external_id = request.GET.get("external_id")
|
||||
external_source = request.GET.get("external_source")
|
||||
|
||||
if external_id and external_source:
|
||||
issue = Issue.objects.get(
|
||||
external_id=external_id,
|
||||
external_source=external_source,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
return Response(
|
||||
IssueSerializer(
|
||||
issue,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
).data,
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
if pk:
|
||||
issue = Issue.issue_objects.annotate(
|
||||
sub_issues_count=Issue.issue_objects.filter(
|
||||
@@ -307,6 +328,17 @@ class IssueAPIEndpoint(BaseAPIView):
|
||||
)
|
||||
|
||||
serializer.save()
|
||||
# Refetch the issue
|
||||
issue = Issue.objects.filter(
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
pk=serializer.data["id"],
|
||||
).first()
|
||||
issue.created_at = request.data.get("created_at", timezone.now())
|
||||
issue.created_by_id = request.data.get(
|
||||
"created_by", request.user.id
|
||||
)
|
||||
issue.save(update_fields=["created_at", "created_by"])
|
||||
|
||||
# Track the issue
|
||||
issue_activity.delay(
|
||||
@@ -323,6 +355,124 @@ class IssueAPIEndpoint(BaseAPIView):
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def put(self, request, slug, project_id):
|
||||
# Get the entities required for putting the issue, external_id and
|
||||
# external_source are must to identify the issue here
|
||||
project = Project.objects.get(pk=project_id)
|
||||
external_id = request.data.get("external_id")
|
||||
external_source = request.data.get("external_source")
|
||||
|
||||
# If the external_id and source are present, we need to find the exact
|
||||
# issue that needs to be updated with the provided external_id and
|
||||
# external_source
|
||||
if external_id and external_source:
|
||||
try:
|
||||
issue = Issue.objects.get(
|
||||
project_id=project_id,
|
||||
workspace__slug=slug,
|
||||
external_id=external_id,
|
||||
external_source=external_source,
|
||||
)
|
||||
|
||||
# Get the current instance of the issue in order to track
|
||||
# changes and dispatch the issue activity
|
||||
current_instance = json.dumps(
|
||||
IssueSerializer(issue).data, cls=DjangoJSONEncoder
|
||||
)
|
||||
|
||||
# Get the requested data, encode it as django object and pass it
|
||||
# to serializer to validation
|
||||
requested_data = json.dumps(
|
||||
self.request.data, cls=DjangoJSONEncoder
|
||||
)
|
||||
serializer = IssueSerializer(
|
||||
issue,
|
||||
data=request.data,
|
||||
context={
|
||||
"project_id": project_id,
|
||||
"workspace_id": project.workspace_id,
|
||||
},
|
||||
partial=True,
|
||||
)
|
||||
if serializer.is_valid():
|
||||
# If the serializer is valid, save the issue and dispatch
|
||||
# the update issue activity worker event.
|
||||
serializer.save()
|
||||
issue_activity.delay(
|
||||
type="issue.activity.updated",
|
||||
requested_data=requested_data,
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=str(issue.id),
|
||||
project_id=str(project_id),
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return Response(
|
||||
# If the serializer is not valid, respond with 400 bad
|
||||
# request
|
||||
serializer.errors,
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
except Issue.DoesNotExist:
|
||||
# If the issue does not exist, a new record needs to be created
|
||||
# for the requested data.
|
||||
# Serialize the data with the context of the project and
|
||||
# workspace
|
||||
serializer = IssueSerializer(
|
||||
data=request.data,
|
||||
context={
|
||||
"project_id": project_id,
|
||||
"workspace_id": project.workspace_id,
|
||||
"default_assignee_id": project.default_assignee_id,
|
||||
},
|
||||
)
|
||||
|
||||
# If the serializer is valid, save the issue and dispatch the
|
||||
# issue activity worker event as created
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
# Refetch the issue
|
||||
issue = Issue.objects.filter(
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
pk=serializer.data["id"],
|
||||
).first()
|
||||
|
||||
# If any of the created_at or created_by is present, update
|
||||
# the issue with the provided data, else return with the
|
||||
# default states given.
|
||||
issue.created_at = request.data.get(
|
||||
"created_at", timezone.now()
|
||||
)
|
||||
issue.created_by_id = request.data.get(
|
||||
"created_by", request.user.id
|
||||
)
|
||||
issue.save(update_fields=["created_at", "created_by"])
|
||||
|
||||
issue_activity.delay(
|
||||
type="issue.activity.created",
|
||||
requested_data=json.dumps(
|
||||
self.request.data, cls=DjangoJSONEncoder
|
||||
),
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=str(serializer.data.get("id", None)),
|
||||
project_id=str(project_id),
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
return Response(
|
||||
serializer.data, status=status.HTTP_201_CREATED
|
||||
)
|
||||
return Response(
|
||||
serializer.errors, status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
else:
|
||||
return Response(
|
||||
{"error": "external_id and external_source are required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
def patch(self, request, slug, project_id, pk=None):
|
||||
issue = Issue.objects.get(
|
||||
workspace__slug=slug, project_id=project_id, pk=pk
|
||||
@@ -379,6 +529,19 @@ class IssueAPIEndpoint(BaseAPIView):
|
||||
issue = Issue.objects.get(
|
||||
workspace__slug=slug, project_id=project_id, pk=pk
|
||||
)
|
||||
if issue.created_by_id != request.user.id and (
|
||||
not ProjectMember.objects.filter(
|
||||
workspace__slug=slug,
|
||||
member=request.user,
|
||||
role=20,
|
||||
project_id=project_id,
|
||||
is_active=True,
|
||||
).exists()
|
||||
):
|
||||
return Response(
|
||||
{"error": "Only admin or creator can delete the issue"},
|
||||
status=status.HTTP_403_FORBIDDEN,
|
||||
)
|
||||
current_instance = json.dumps(
|
||||
IssueSerializer(issue).data, cls=DjangoJSONEncoder
|
||||
)
|
||||
@@ -587,14 +750,20 @@ class IssueLinkAPIEndpoint(BaseAPIView):
|
||||
project_id=project_id,
|
||||
issue_id=issue_id,
|
||||
)
|
||||
|
||||
link = IssueLink.objects.get(pk=serializer.data["id"])
|
||||
link.created_by_id = request.data.get(
|
||||
"created_by", request.user.id
|
||||
)
|
||||
link.save(update_fields=["created_by"])
|
||||
issue_activity.delay(
|
||||
type="link.activity.created",
|
||||
requested_data=json.dumps(
|
||||
serializer.data, cls=DjangoJSONEncoder
|
||||
),
|
||||
actor_id=str(self.request.user.id),
|
||||
issue_id=str(self.kwargs.get("issue_id")),
|
||||
project_id=str(self.kwargs.get("project_id")),
|
||||
actor_id=str(link.created_by_id),
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
@@ -748,12 +917,24 @@ class IssueCommentAPIEndpoint(BaseAPIView):
|
||||
issue_id=issue_id,
|
||||
actor=request.user,
|
||||
)
|
||||
issue_comment = IssueComment.objects.get(
|
||||
pk=serializer.data.get("id")
|
||||
)
|
||||
# Update the created_at and the created_by and save the comment
|
||||
issue_comment.created_at = request.data.get(
|
||||
"created_at", timezone.now()
|
||||
)
|
||||
issue_comment.created_by_id = request.data.get(
|
||||
"created_by", request.user.id
|
||||
)
|
||||
issue_comment.save(update_fields=["created_at", "created_by"])
|
||||
|
||||
issue_activity.delay(
|
||||
type="comment.activity.created",
|
||||
requested_data=json.dumps(
|
||||
serializer.data, cls=DjangoJSONEncoder
|
||||
),
|
||||
actor_id=str(self.request.user.id),
|
||||
actor_id=str(issue_comment.created_by_id),
|
||||
issue_id=str(self.kwargs.get("issue_id")),
|
||||
project_id=str(self.kwargs.get("project_id")),
|
||||
current_instance=None,
|
||||
@@ -874,3 +1055,83 @@ class IssueActivityAPIEndpoint(BaseAPIView):
|
||||
expand=self.expand,
|
||||
).data,
|
||||
)
|
||||
|
||||
|
||||
class IssueAttachmentEndpoint(BaseAPIView):
|
||||
serializer_class = IssueAttachmentSerializer
|
||||
permission_classes = [
|
||||
ProjectEntityPermission,
|
||||
]
|
||||
model = IssueAttachment
|
||||
parser_classes = (MultiPartParser, FormParser)
|
||||
|
||||
def post(self, request, slug, project_id, issue_id):
|
||||
serializer = IssueAttachmentSerializer(data=request.data)
|
||||
if (
|
||||
request.data.get("external_id")
|
||||
and request.data.get("external_source")
|
||||
and IssueAttachment.objects.filter(
|
||||
project_id=project_id,
|
||||
workspace__slug=slug,
|
||||
issue_id=issue_id,
|
||||
external_source=request.data.get("external_source"),
|
||||
external_id=request.data.get("external_id"),
|
||||
).exists()
|
||||
):
|
||||
issue_attachment = IssueAttachment.objects.filter(
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
external_id=request.data.get("external_id"),
|
||||
external_source=request.data.get("external_source"),
|
||||
).first()
|
||||
return Response(
|
||||
{
|
||||
"error": "Issue attachment with the same external id and external source already exists",
|
||||
"id": str(issue_attachment.id),
|
||||
},
|
||||
status=status.HTTP_409_CONFLICT,
|
||||
)
|
||||
|
||||
if serializer.is_valid():
|
||||
serializer.save(project_id=project_id, issue_id=issue_id)
|
||||
issue_activity.delay(
|
||||
type="attachment.activity.created",
|
||||
requested_data=None,
|
||||
actor_id=str(self.request.user.id),
|
||||
issue_id=str(self.kwargs.get("issue_id", None)),
|
||||
project_id=str(self.kwargs.get("project_id", None)),
|
||||
current_instance=json.dumps(
|
||||
serializer.data,
|
||||
cls=DjangoJSONEncoder,
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def delete(self, request, slug, project_id, issue_id, pk):
|
||||
issue_attachment = IssueAttachment.objects.get(pk=pk)
|
||||
issue_attachment.asset.delete(save=False)
|
||||
issue_attachment.delete()
|
||||
issue_activity.delay(
|
||||
type="attachment.activity.deleted",
|
||||
requested_data=None,
|
||||
actor_id=str(self.request.user.id),
|
||||
issue_id=str(self.kwargs.get("issue_id", None)),
|
||||
project_id=str(self.kwargs.get("project_id", None)),
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
)
|
||||
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
def get(self, request, slug, project_id, issue_id):
|
||||
issue_attachments = IssueAttachment.objects.filter(
|
||||
issue_id=issue_id, workspace__slug=slug, project_id=project_id
|
||||
)
|
||||
serializer = IssueAttachmentSerializer(issue_attachments, many=True)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
153 apiserver/plane/api/views/member.py (new file)
@@ -0,0 +1,153 @@
|
||||
# Python imports
import uuid

# Django imports
from django.contrib.auth.hashers import make_password
from django.core.validators import validate_email
from django.core.exceptions import ValidationError

# Third Party imports
from rest_framework.response import Response
from rest_framework import status

# Module imports
from .base import BaseAPIView
from plane.api.serializers import UserLiteSerializer
from plane.db.models import (
    User,
    Workspace,
    Project,
    WorkspaceMember,
    ProjectMember,
)

from plane.app.permissions import (
    ProjectMemberPermission,
)


# API endpoint to get and insert users inside the workspace
class ProjectMemberAPIEndpoint(BaseAPIView):
    permission_classes = [
        ProjectMemberPermission,
    ]

    # Get all the users that are present inside the workspace
    def get(self, request, slug, project_id):
        # Check if the workspace exists
        if not Workspace.objects.filter(slug=slug).exists():
            return Response(
                {"error": "Provided workspace does not exist"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Get the project members that are present inside the project
        project_members = ProjectMember.objects.filter(
            project_id=project_id, workspace__slug=slug
        ).values_list("member_id", flat=True)

        # Get all the users that are present inside the workspace
        users = UserLiteSerializer(
            User.objects.filter(
                id__in=project_members,
            ),
            many=True,
        ).data

        return Response(users, status=status.HTTP_200_OK)

    # Insert a new user inside the workspace, and assign the user to the project
    def post(self, request, slug, project_id):
        # Validate that the mandatory fields (email, display_name) are
        # present, then check that the workspace and project are valid
        # ------------------- Validation -------------------
        if (
            request.data.get("email") is None
            or request.data.get("display_name") is None
        ):
            return Response(
                {
                    "error": "Expected email, display_name, workspace_slug, project_id, one or more of the fields are missing."
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        email = request.data.get("email")

        try:
            validate_email(email)
        except ValidationError:
            return Response(
                {"error": "Invalid email provided"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        workspace = Workspace.objects.filter(slug=slug).first()
        project = Project.objects.filter(pk=project_id).first()

        if not all([workspace, project]):
            return Response(
                {"error": "Provided workspace or project does not exist"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Check if user exists
        user = User.objects.filter(email=email).first()
        workspace_member = None
        project_member = None

        if user:
            # Check if user is part of the workspace
            workspace_member = WorkspaceMember.objects.filter(
                workspace=workspace, member=user
            ).first()
            if workspace_member:
                # Check if user is part of the project
                project_member = ProjectMember.objects.filter(
                    project=project, member=user
                ).first()
                if project_member:
                    return Response(
                        {
                            "error": "User is already part of the workspace and project"
                        },
                        status=status.HTTP_400_BAD_REQUEST,
                    )

        # If user does not exist, create the user
        if not user:
            user = User.objects.create(
                email=email,
                display_name=request.data.get("display_name"),
                first_name=request.data.get("first_name", ""),
                last_name=request.data.get("last_name", ""),
                username=uuid.uuid4().hex,
                password=make_password(uuid.uuid4().hex),
                is_password_autoset=True,
                is_active=False,
            )
            user.save()

        # Create a workspace member for the user if not already a member
        if not workspace_member:
            workspace_member = WorkspaceMember.objects.create(
                workspace=workspace,
                member=user,
                role=request.data.get("role", 10),
            )
            workspace_member.save()

        # Create a project member for the user if not already a member
        if not project_member:
            project_member = ProjectMember.objects.create(
                project=project,
                member=user,
                role=request.data.get("role", 10),
            )
            project_member.save()

        # Serialize the user and return the response
        user_data = UserLiteSerializer(user).data

        return Response(user_data, status=status.HTTP_201_CREATED)
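A hypothetical client-side call against the endpoint above; the URL route and the X-Api-Key header are assumptions (the real route lives in the API urlconf), while the payload mirrors the fields the view validates:

# Illustrative only: calling the new member endpoint from a client. The base
# URL, route shape, and auth header are assumptions; email and display_name
# are the two fields the view treats as mandatory.
import requests

BASE = "https://api.example.com/api/v1"  # hypothetical host
project_id = "00000000-0000-0000-0000-000000000000"  # placeholder UUID

resp = requests.post(
    f"{BASE}/workspaces/my-workspace/projects/{project_id}/members/",
    headers={"X-Api-Key": "plane_api_key_placeholder"},
    json={
        "email": "new.user@example.com",  # mandatory
        "display_name": "New User",       # mandatory
        "role": 15,                       # optional, defaults to 10 in the view
    },
)
print(resp.status_code, resp.json())  # 201 with the UserLiteSerializer payload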
@@ -18,7 +18,7 @@ from plane.api.serializers import (
|
||||
ModuleSerializer,
|
||||
)
|
||||
from plane.app.permissions import ProjectEntityPermission
|
||||
from plane.bgtasks.issue_activites_task import issue_activity
|
||||
from plane.bgtasks.issue_activities_task import issue_activity
|
||||
from plane.db.models import (
|
||||
Issue,
|
||||
IssueAttachment,
|
||||
@@ -27,6 +27,8 @@ from plane.db.models import (
|
||||
ModuleIssue,
|
||||
ModuleLink,
|
||||
Project,
|
||||
ProjectMember,
|
||||
UserFavorite,
|
||||
)
|
||||
|
||||
from .base import BaseAPIView
|
||||
@@ -265,6 +267,20 @@ class ModuleAPIEndpoint(BaseAPIView):
|
||||
module = Module.objects.get(
|
||||
workspace__slug=slug, project_id=project_id, pk=pk
|
||||
)
|
||||
if module.created_by_id != request.user.id and (
|
||||
not ProjectMember.objects.filter(
|
||||
workspace__slug=slug,
|
||||
member=request.user,
|
||||
role=20,
|
||||
project_id=project_id,
|
||||
is_active=True,
|
||||
).exists()
|
||||
):
|
||||
return Response(
|
||||
{"error": "Only admin or creator can delete the module"},
|
||||
status=status.HTTP_403_FORBIDDEN,
|
||||
)
|
||||
|
||||
module_issues = list(
|
||||
ModuleIssue.objects.filter(module_id=pk).values_list(
|
||||
"issue", flat=True
|
||||
@@ -282,10 +298,25 @@ class ModuleAPIEndpoint(BaseAPIView):
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=None,
|
||||
project_id=str(project_id),
|
||||
current_instance=None,
|
||||
current_instance=json.dumps(
|
||||
{
|
||||
"module_name": str(module.name),
|
||||
}
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
module.delete()
|
||||
# Delete the module issues
|
||||
ModuleIssue.objects.filter(
|
||||
module=pk,
|
||||
project_id=project_id,
|
||||
).delete()
|
||||
# Delete the user favorite module
|
||||
UserFavorite.objects.filter(
|
||||
entity_type="module",
|
||||
entity_identifier=pk,
|
||||
project_id=project_id,
|
||||
).delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
@@ -493,7 +524,6 @@ class ModuleIssueAPIEndpoint(BaseAPIView):
|
||||
|
||||
|
||||
class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView):
|
||||
|
||||
permission_classes = [
|
||||
ProjectEntityPermission,
|
||||
]
|
||||
@@ -608,6 +638,12 @@ class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView):
|
||||
)
|
||||
module.archived_at = timezone.now()
|
||||
module.save()
|
||||
UserFavorite.objects.filter(
|
||||
entity_type="module",
|
||||
entity_identifier=pk,
|
||||
project_id=project_id,
|
||||
workspace__slug=slug,
|
||||
).delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
def delete(self, request, slug, project_id, pk):
|
||||
|
||||
@@ -19,13 +19,14 @@ from plane.app.permissions import ProjectBasePermission
|
||||
from plane.db.models import (
|
||||
Cycle,
|
||||
Inbox,
|
||||
IssueProperty,
|
||||
IssueUserProperty,
|
||||
Module,
|
||||
Project,
|
||||
DeployBoard,
|
||||
ProjectMember,
|
||||
State,
|
||||
Workspace,
|
||||
UserFavorite,
|
||||
)
|
||||
from plane.bgtasks.webhook_task import model_activity
|
||||
from .base import BaseAPIView
|
||||
@@ -165,7 +166,7 @@ class ProjectAPIEndpoint(BaseAPIView):
|
||||
role=20,
|
||||
)
|
||||
# Also create the issue property for the user
|
||||
_ = IssueProperty.objects.create(
|
||||
_ = IssueUserProperty.objects.create(
|
||||
project_id=serializer.data["id"],
|
||||
user=request.user,
|
||||
)
|
||||
@@ -179,7 +180,7 @@ class ProjectAPIEndpoint(BaseAPIView):
|
||||
role=20,
|
||||
)
|
||||
# Also create the issue property for the user
|
||||
IssueProperty.objects.create(
|
||||
IssueUserProperty.objects.create(
|
||||
project_id=serializer.data["id"],
|
||||
user_id=serializer.data["project_lead"],
|
||||
)
|
||||
@@ -240,6 +241,7 @@ class ProjectAPIEndpoint(BaseAPIView):
|
||||
.filter(pk=serializer.data["id"])
|
||||
.first()
|
||||
)
|
||||
|
||||
# Model activity
|
||||
model_activity.delay(
|
||||
model_name="project",
|
||||
@@ -355,6 +357,12 @@ class ProjectAPIEndpoint(BaseAPIView):
|
||||
|
||||
def delete(self, request, slug, pk):
|
||||
project = Project.objects.get(pk=pk, workspace__slug=slug)
|
||||
# Delete the user favorite cycle
|
||||
UserFavorite.objects.filter(
|
||||
entity_type="project",
|
||||
entity_identifier=pk,
|
||||
project_id=pk,
|
||||
).delete()
|
||||
project.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@@ -369,6 +377,10 @@ class ProjectArchiveUnarchiveAPIEndpoint(BaseAPIView):
|
||||
project = Project.objects.get(pk=project_id, workspace__slug=slug)
|
||||
project.archived_at = timezone.now()
|
||||
project.save()
|
||||
UserFavorite.objects.filter(
|
||||
workspace__slug=slug,
|
||||
project=project_id,
|
||||
).delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
def delete(self, request, slug, project_id):
|
||||
|
||||
@@ -12,3 +12,4 @@ from .project import (
|
||||
ProjectMemberPermission,
|
||||
ProjectLitePermission,
|
||||
)
|
||||
from .base import allow_permission, ROLE
|
||||
61 apiserver/plane/app/permissions/base.py (new file)
@@ -0,0 +1,61 @@
|
||||
from plane.db.models import WorkspaceMember, ProjectMember
from functools import wraps
from rest_framework.response import Response
from rest_framework import status

from enum import Enum


class ROLE(Enum):
    ADMIN = 20
    MEMBER = 15
    VIEWER = 10
    GUEST = 5


def allow_permission(allowed_roles, level="PROJECT", creator=False, model=None):
    def decorator(view_func):
        @wraps(view_func)
        def _wrapped_view(instance, request, *args, **kwargs):

            # Check for creator if required
            if creator and model:
                obj = model.objects.filter(
                    id=kwargs["pk"], created_by=request.user
                ).exists()
                if obj:
                    return view_func(instance, request, *args, **kwargs)

            # Convert allowed_roles to their values if they are enum members
            allowed_role_values = [
                role.value if isinstance(role, ROLE) else role
                for role in allowed_roles
            ]

            # Check role permissions
            if level == "WORKSPACE":
                if WorkspaceMember.objects.filter(
                    member=request.user,
                    workspace__slug=kwargs["slug"],
                    role__in=allowed_role_values,
                    is_active=True,
                ).exists():
                    return view_func(instance, request, *args, **kwargs)
            else:
                if ProjectMember.objects.filter(
                    member=request.user,
                    workspace__slug=kwargs["slug"],
                    project_id=kwargs["project_id"],
                    role__in=allowed_role_values,
                    is_active=True,
                ).exists():
                    return view_func(instance, request, *args, **kwargs)

            # Return permission denied if no conditions are met
            return Response(
                {"error": "You don't have the required permissions."},
                status=status.HTTP_403_FORBIDDEN,
            )

        return _wrapped_view

    return decorator
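A usage sketch for the decorator above; the view class is a placeholder, while allow_permission, ROLE, and the kwargs contract it reads (slug, project_id, pk) come from the file itself:

# Illustrative only: applying the decorator above inside an API view.
from plane.app.permissions import allow_permission, ROLE
from plane.db.models import Issue

from .base import BaseAPIView  # placeholder base, as used elsewhere in the app


class ExampleIssueView(BaseAPIView):
    @allow_permission([ROLE.ADMIN, ROLE.MEMBER])
    def patch(self, request, slug, project_id, pk):
        # Runs only when the requester is an active project ADMIN or MEMBER;
        # everyone else receives the 403 built inside the decorator.
        ...

    @allow_permission([ROLE.ADMIN], creator=True, model=Issue)
    def delete(self, request, slug, project_id, pk):
        # creator=True lets the Issue's creator through even without the role.
        ...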
@@ -50,7 +50,7 @@ from .issue import (
|
||||
IssueCreateSerializer,
|
||||
IssueActivitySerializer,
|
||||
IssueCommentSerializer,
|
||||
IssuePropertySerializer,
|
||||
IssueUserPropertySerializer,
|
||||
IssueAssigneeSerializer,
|
||||
LabelSerializer,
|
||||
IssueSerializer,
|
||||
@@ -91,6 +91,8 @@ from .page import (
|
||||
PageLogSerializer,
|
||||
SubPageSerializer,
|
||||
PageDetailSerializer,
|
||||
PageVersionSerializer,
|
||||
PageVersionDetailSerializer,
|
||||
)
|
||||
|
||||
from .estimate import (
|
||||
@@ -120,3 +122,5 @@ from .exporter import ExporterHistorySerializer
|
||||
from .webhook import WebhookSerializer, WebhookLogSerializer
|
||||
|
||||
from .dashboard import DashboardSerializer, WidgetSerializer
|
||||
|
||||
from .favorite import UserFavoriteSerializer
|
||||
|
||||
101 apiserver/plane/app/serializers/favorite.py (new file)
@@ -0,0 +1,101 @@
|
||||
from rest_framework import serializers

from plane.db.models import (
    UserFavorite,
    Cycle,
    Module,
    Issue,
    IssueView,
    Page,
    Project,
)


class ProjectFavoriteLiteSerializer(serializers.ModelSerializer):

    class Meta:
        model = Project
        fields = ["id", "name", "logo_props"]


class PageFavoriteLiteSerializer(serializers.ModelSerializer):
    project_id = serializers.SerializerMethodField()

    class Meta:
        model = Page
        fields = ["id", "name", "logo_props", "project_id"]

    def get_project_id(self, obj):
        project = (
            obj.projects.first()
        )  # This gets the first project related to the Page
        return project.id if project else None


class CycleFavoriteLiteSerializer(serializers.ModelSerializer):

    class Meta:
        model = Cycle
        fields = ["id", "name", "logo_props", "project_id"]


class ModuleFavoriteLiteSerializer(serializers.ModelSerializer):

    class Meta:
        model = Module
        fields = ["id", "name", "logo_props", "project_id"]


class ViewFavoriteSerializer(serializers.ModelSerializer):

    class Meta:
        model = IssueView
        fields = ["id", "name", "logo_props", "project_id"]


def get_entity_model_and_serializer(entity_type):
    entity_map = {
        "cycle": (Cycle, CycleFavoriteLiteSerializer),
        "issue": (Issue, None),
        "module": (Module, ModuleFavoriteLiteSerializer),
        "view": (IssueView, ViewFavoriteSerializer),
        "page": (Page, PageFavoriteLiteSerializer),
        "project": (Project, ProjectFavoriteLiteSerializer),
        "folder": (None, None),
    }
    return entity_map.get(entity_type, (None, None))


class UserFavoriteSerializer(serializers.ModelSerializer):
    entity_data = serializers.SerializerMethodField()

    class Meta:
        model = UserFavorite
        fields = [
            "id",
            "entity_type",
            "entity_identifier",
            "entity_data",
            "name",
            "is_folder",
            "sequence",
            "parent",
            "workspace_id",
            "project_id",
        ]
        read_only_fields = ["workspace", "created_by", "updated_by"]

    def get_entity_data(self, obj):
        entity_type = obj.entity_type
        entity_identifier = obj.entity_identifier

        entity_model, entity_serializer = get_entity_model_and_serializer(
            entity_type
        )
        if entity_model and entity_serializer:
            try:
                entity = entity_model.objects.get(pk=entity_identifier)
                return entity_serializer(entity).data
            except entity_model.DoesNotExist:
                return None
        return None
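To illustrate how the pieces above fit together, a short sketch; the import path and the favorite row are assumptions:

# Illustrative only: resolving the nested entity payload for a favorite,
# assuming the module above is importable as plane.app.serializers.favorite.
from plane.app.serializers.favorite import get_entity_model_and_serializer

model, serializer_cls = get_entity_model_and_serializer("cycle")
# -> (Cycle, CycleFavoriteLiteSerializer)

# UserFavoriteSerializer(favorite).data["entity_data"] then carries the lite
# payload for the target row, or None when that row was deleted, because
# DoesNotExist is caught in get_entity_data above.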
@@ -17,7 +17,7 @@ from plane.db.models import (
|
||||
Issue,
|
||||
IssueActivity,
|
||||
IssueComment,
|
||||
IssueProperty,
|
||||
IssueUserProperty,
|
||||
IssueAssignee,
|
||||
IssueSubscriber,
|
||||
IssueLabel,
|
||||
@@ -135,7 +135,11 @@ class IssueCreateSerializer(BaseSerializer):
|
||||
workspace_id = self.context["workspace_id"]
|
||||
default_assignee_id = self.context["default_assignee_id"]
|
||||
|
||||
issue = Issue.objects.create(**validated_data, project_id=project_id)
|
||||
# Create Issue
|
||||
issue = Issue.objects.create(
|
||||
**validated_data,
|
||||
project_id=project_id,
|
||||
)
|
||||
|
||||
# Issue Audit Users
|
||||
created_by_id = issue.created_by_id
|
||||
@@ -248,9 +252,9 @@ class IssueActivitySerializer(BaseSerializer):
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class IssuePropertySerializer(BaseSerializer):
|
||||
class IssueUserPropertySerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = IssueProperty
|
||||
model = IssueUserProperty
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"user",
|
||||
@@ -433,17 +437,21 @@ class IssueLinkSerializer(BaseSerializer):
|
||||
"issue",
|
||||
]
|
||||
|
||||
    # Removed: the old validator checked the URL format and scheme by hand
    def validate_url(self, value):
        # Check URL format
        validate_url = URLValidator()
        try:
            validate_url(value)
        except ValidationError:
            raise serializers.ValidationError("Invalid URL format.")
        # Check URL scheme
        if not value.startswith(("http://", "https://")):
            raise serializers.ValidationError("Invalid URL scheme.")

    # Added: normalize the URL first, then rely on URLValidator alone
    def to_internal_value(self, data):
        # Modify the URL before validation by appending http:// if missing
        url = data.get("url", "")
        if url and not url.startswith(("http://", "https://")):
            data["url"] = "http://" + url

        return super().to_internal_value(data)

    def validate_url(self, value):
        # Use Django's built-in URLValidator for validation
        url_validator = URLValidator()
        try:
            url_validator(value)
        except ValidationError:
            raise serializers.ValidationError({"error": "Invalid URL format."})

        return value
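A behavior sketch for the to_internal_value/validate_url pair added above; the field values are made up:

# Illustrative only: a bare host is promoted to http:// before validation.
serializer = IssueLinkSerializer(data={"title": "Docs", "url": "plane.so/docs"})
serializer.is_valid()
# to_internal_value rewrites "url" to "http://plane.so/docs", so the new
# URLValidator-based check accepts input the old scheme check would reject.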
@@ -459,10 +467,14 @@ class IssueLinkSerializer(BaseSerializer):
|
||||
return IssueLink.objects.create(**validated_data)
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
if IssueLink.objects.filter(
|
||||
url=validated_data.get("url"),
|
||||
issue_id=instance.issue_id,
|
||||
).exclude(pk=instance.id).exists():
|
||||
if (
|
||||
IssueLink.objects.filter(
|
||||
url=validated_data.get("url"),
|
||||
issue_id=instance.issue_id,
|
||||
)
|
||||
.exclude(pk=instance.id)
|
||||
.exists()
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
{"error": "URL already exists for this Issue"}
|
||||
)
|
||||
@@ -509,7 +521,7 @@ class IssueAttachmentLiteSerializer(DynamicBaseSerializer):
|
||||
"attributes",
|
||||
"issue_id",
|
||||
"updated_at",
|
||||
"updated_by_id",
|
||||
"updated_by",
|
||||
]
|
||||
read_only_fields = fields
|
||||
|
||||
@@ -525,6 +537,7 @@ class IssueReactionSerializer(BaseSerializer):
|
||||
"project",
|
||||
"issue",
|
||||
"actor",
|
||||
"deleted_at",
|
||||
]
|
||||
|
||||
|
||||
@@ -543,7 +556,13 @@ class CommentReactionSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = CommentReaction
|
||||
fields = "__all__"
|
||||
read_only_fields = ["workspace", "project", "comment", "actor"]
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"project",
|
||||
"comment",
|
||||
"actor",
|
||||
"deleted_at",
|
||||
]
|
||||
|
||||
|
||||
class IssueVoteSerializer(BaseSerializer):
|
||||
|
||||
@@ -5,6 +5,10 @@ from rest_framework import serializers
|
||||
from .base import BaseSerializer, DynamicBaseSerializer
|
||||
from .project import ProjectLiteSerializer
|
||||
|
||||
# Django imports
|
||||
from django.core.validators import URLValidator
|
||||
from django.core.exceptions import ValidationError
|
||||
|
||||
from plane.db.models import (
|
||||
User,
|
||||
Module,
|
||||
@@ -39,6 +43,7 @@ class ModuleWriteSerializer(BaseSerializer):
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"archived_at",
|
||||
"deleted_at",
|
||||
]
|
||||
|
||||
def to_representation(self, instance):
|
||||
@@ -154,16 +159,48 @@ class ModuleLinkSerializer(BaseSerializer):
|
||||
"module",
|
||||
]
|
||||
|
||||
# Validation if url already exists
|
||||
def to_internal_value(self, data):
|
||||
# Modify the URL before validation by appending http:// if missing
|
||||
url = data.get("url", "")
|
||||
if url and not url.startswith(("http://", "https://")):
|
||||
data["url"] = "http://" + url
|
||||
|
||||
return super().to_internal_value(data)
|
||||
|
||||
def validate_url(self, value):
|
||||
# Use Django's built-in URLValidator for validation
|
||||
url_validator = URLValidator()
|
||||
try:
|
||||
url_validator(value)
|
||||
except ValidationError:
|
||||
raise serializers.ValidationError({"error": "Invalid URL format."})
|
||||
|
||||
return value
|
||||
|
||||
def create(self, validated_data):
|
||||
validated_data["url"] = self.validate_url(validated_data.get("url"))
|
||||
if ModuleLink.objects.filter(
|
||||
url=validated_data.get("url"),
|
||||
module_id=validated_data.get("module_id"),
|
||||
).exists():
|
||||
raise serializers.ValidationError({"error": "URL already exists."})
|
||||
return super().create(validated_data)
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
validated_data["url"] = self.validate_url(validated_data.get("url"))
|
||||
if (
|
||||
ModuleLink.objects.filter(
|
||||
url=validated_data.get("url"),
|
||||
module_id=instance.module_id,
|
||||
)
|
||||
.exclude(pk=instance.id)
|
||||
.exists()
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
{"error": "URL already exists for this Issue"}
|
||||
)
|
||||
return ModuleLink.objects.create(**validated_data)
|
||||
|
||||
return super().update(instance, validated_data)
|
||||
|
||||
|
||||
class ModuleSerializer(DynamicBaseSerializer):
|
||||
@@ -177,8 +214,8 @@ class ModuleSerializer(DynamicBaseSerializer):
|
||||
started_issues = serializers.IntegerField(read_only=True)
|
||||
unstarted_issues = serializers.IntegerField(read_only=True)
|
||||
backlog_issues = serializers.IntegerField(read_only=True)
|
||||
total_estimate_points = serializers.IntegerField(read_only=True)
|
||||
completed_estimate_points = serializers.IntegerField(read_only=True)
|
||||
total_estimate_points = serializers.FloatField(read_only=True)
|
||||
completed_estimate_points = serializers.FloatField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Module
|
||||
@@ -222,13 +259,20 @@ class ModuleSerializer(DynamicBaseSerializer):
|
||||
class ModuleDetailSerializer(ModuleSerializer):
|
||||
link_module = ModuleLinkSerializer(read_only=True, many=True)
|
||||
sub_issues = serializers.IntegerField(read_only=True)
|
||||
backlog_estimate_points = serializers.IntegerField(read_only=True)
|
||||
unstarted_estimate_points = serializers.IntegerField(read_only=True)
|
||||
started_estimate_points = serializers.IntegerField(read_only=True)
|
||||
cancelled_estimate_points = serializers.IntegerField(read_only=True)
|
||||
backlog_estimate_points = serializers.FloatField(read_only=True)
|
||||
unstarted_estimate_points = serializers.FloatField(read_only=True)
|
||||
started_estimate_points = serializers.FloatField(read_only=True)
|
||||
cancelled_estimate_points = serializers.FloatField(read_only=True)
|
||||
|
||||
class Meta(ModuleSerializer.Meta):
|
||||
fields = ModuleSerializer.Meta.fields + ["link_module", "sub_issues", "backlog_estimate_points", "unstarted_estimate_points", "started_estimate_points", "cancelled_estimate_points"]
|
||||
fields = ModuleSerializer.Meta.fields + [
|
||||
"link_module",
|
||||
"sub_issues",
|
||||
"backlog_estimate_points",
|
||||
"unstarted_estimate_points",
|
||||
"started_estimate_points",
|
||||
"cancelled_estimate_points",
|
||||
]
|
||||
|
||||
|
||||
class ModuleUserPropertiesSerializer(BaseSerializer):
|
||||
|
||||
@@ -3,11 +3,16 @@ from .base import BaseSerializer
|
||||
from .user import UserLiteSerializer
|
||||
from plane.db.models import Notification, UserNotificationPreference
|
||||
|
||||
# Third Party imports
|
||||
from rest_framework import serializers
|
||||
|
||||
|
||||
class NotificationSerializer(BaseSerializer):
|
||||
triggered_by_details = UserLiteSerializer(
|
||||
read_only=True, source="triggered_by"
|
||||
)
|
||||
is_inbox_issue = serializers.BooleanField(read_only=True)
|
||||
is_mentioned_notification = serializers.BooleanField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Notification
|
||||
|
||||
@@ -10,6 +10,7 @@ from plane.db.models import (
|
||||
Label,
|
||||
ProjectPage,
|
||||
Project,
|
||||
PageVersion,
|
||||
)
|
||||
|
||||
|
||||
@@ -161,3 +162,46 @@ class PageLogSerializer(BaseSerializer):
            "workspace",
            "page",
        ]
+
+
+class PageVersionSerializer(BaseSerializer):
+   class Meta:
+       model = PageVersion
+       fields = [
+           "id",
+           "workspace",
+           "page",
+           "last_saved_at",
+           "owned_by",
+           "created_at",
+           "updated_at",
+           "created_by",
+           "updated_by",
+       ]
+       read_only_fields = [
+           "workspace",
+           "page",
+       ]
+
+
+class PageVersionDetailSerializer(BaseSerializer):
+   class Meta:
+       model = PageVersion
+       fields = [
+           "id",
+           "workspace",
+           "page",
+           "last_saved_at",
+           "description_binary",
+           "description_html",
+           "description_json",
+           "owned_by",
+           "created_at",
+           "updated_at",
+           "created_by",
+           "updated_by",
+       ]
+       read_only_fields = [
+           "workspace",
+           "page",
+       ]
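The two new serializers differ only in the heavy description_binary/description_html/description_json fields, so listings can stay light while a single-version read returns full content. A common way to wire that up is per-action serializer selection; a hedged sketch (Plane's actual PageVersionEndpoint is a plain API view, so this viewset shape is illustrative, not the real implementation):

    from rest_framework import viewsets

    class PageVersionViewSet(viewsets.ReadOnlyModelViewSet):
        # Assumes the PageVersion model and the two serializer classes
        # defined in the diff above.
        queryset = PageVersion.objects.all()

        def get_serializer_class(self):
            if self.action == "retrieve":
                return PageVersionDetailSerializer  # includes description_* fields
            return PageVersionSerializer  # lightweight listing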
@@ -28,6 +28,7 @@ class ProjectSerializer(BaseSerializer):
        fields = "__all__"
        read_only_fields = [
            "workspace",
+           "deleted_at",
        ]

    def create(self, validated_data):
@@ -16,26 +16,39 @@ from .base import BaseSerializer
class UserSerializer(BaseSerializer):
    class Meta:
        model = User
-       fields = "__all__"
+       # Exclude password field from the serializer
+       fields = [
+           field.name
+           for field in User._meta.fields
+           if field.name != "password"
+       ]
+       # Make all system fields and email read only
        read_only_fields = [
            "id",
            "username",
            "mobile_number",
            "email",
            "token",
            "created_at",
            "updated_at",
            "is_superuser",
            "is_staff",
            "is_managed",
            "last_active",
            "last_login_time",
            "last_logout_time",
            "last_login_ip",
            "last_logout_ip",
            "last_login_uagent",
            "token_updated_at",
            "last_location",
            "last_login_medium",
            "created_location",
            "is_bot",
            "is_password_autoset",
            "is_email_verified",
            "is_active",
            "token_updated_at",
        ]
        extra_kwargs = {"password": {"write_only": True}}

    # If the user has already filled first name or last name then he is onboarded
    def get_is_onboarded(self, obj):
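Deriving Meta.fields from the model metadata (instead of fields = "__all__") guarantees the password hash can never leak through the serializer, even if extra_kwargs were dropped later. The same idiom works standalone in any configured Django project:

    from django.contrib.auth import get_user_model

    # Build a field whitelist from the model itself, excluding sensitive columns.
    SENSITIVE_FIELDS = {"password"}
    UserModel = get_user_model()
    safe_fields = [
        field.name
        for field in UserModel._meta.fields
        if field.name not in SENSITIVE_FIELDS
    ]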
@@ -208,9 +221,15 @@ class ProfileSerializer(BaseSerializer):
    class Meta:
        model = Profile
        fields = "__all__"
+       read_only_fields = [
+           "user",
+       ]


class AccountSerializer(BaseSerializer):
    class Meta:
        model = Account
        fields = "__all__"
+       read_only_fields = [
+           "user",
+       ]
@@ -23,7 +23,7 @@ class IssueViewSerializer(DynamicBaseSerializer):
    ]

    def create(self, validated_data):
-       query_params = validated_data.get("query_data", {})
+       query_params = validated_data.get("filters", {})
        if bool(query_params):
            validated_data["query"] = issue_filters(query_params, "POST")
        else:
@@ -31,7 +31,7 @@ class IssueViewSerializer(DynamicBaseSerializer):
        return IssueView.objects.create(**validated_data)

    def update(self, instance, validated_data):
-       query_params = validated_data.get("query_data", {})
+       query_params = validated_data.get("filters", {})
        if bool(query_params):
            validated_data["query"] = issue_filters(query_params, "POST")
        else:
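Both create() and update() now read the incoming filter payload from the "filters" key (previously "query_data") before normalizing it with issue_filters. A client creating a view would therefore send a body shaped roughly like the sketch below; the specific filter keys are illustrative, not a documented schema:

    payload = {
        "name": "My open issues",             # other fields unchanged by this diff
        "filters": {"priority": ["urgent"]},  # read by create()/update() above
    }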
@@ -15,6 +15,7 @@ from plane.db.models import (
    WorkspaceTheme,
    WorkspaceUserProperties,
)
+from plane.utils.constants import RESTRICTED_WORKSPACE_SLUGS


class WorkSpaceSerializer(DynamicBaseSerializer):
@@ -22,22 +23,11 @@ class WorkSpaceSerializer(DynamicBaseSerializer):
    total_members = serializers.IntegerField(read_only=True)
    total_issues = serializers.IntegerField(read_only=True)

-   def validated(self, data):
-       if data.get("slug") in [
-           "404",
-           "accounts",
-           "api",
-           "create-workspace",
-           "god-mode",
-           "installations",
-           "invitations",
-           "onboarding",
-           "profile",
-           "spaces",
-           "workspace-invitations",
-           "password",
-       ]:
-           raise serializers.ValidationError({"slug": "Slug is not valid"})
+   def validate_slug(self, value):
+       # Check if the slug is restricted
+       if value in RESTRICTED_WORKSPACE_SLUGS:
+           raise serializers.ValidationError("Slug is not valid")
+       return value

    class Meta:
        model = Workspace
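This hunk fixes a real bug as well as tidying: DRF only invokes hooks named validate (object-level) and validate_<field> (field-level) during is_valid(), so the old def validated(self, data) was never called, and it never returned the value either. The new validate_slug runs automatically and checks the shared RESTRICTED_WORKSPACE_SLUGS constant. A self-contained sketch of the mechanism (the constant here is a stand-in):

    from rest_framework import serializers

    RESTRICTED = {"api", "accounts", "god-mode"}  # stand-in for RESTRICTED_WORKSPACE_SLUGS

    class WorkspaceSlugSerializer(serializers.Serializer):
        slug = serializers.CharField()

        def validate_slug(self, value):
            # DRF calls validate_<field_name> automatically inside is_valid().
            if value in RESTRICTED:
                raise serializers.ValidationError("Slug is not valid")
            return value

    s = WorkspaceSlugSerializer(data={"slug": "api"})
    s.is_valid()  # False; s.errors == {"slug": ["Slug is not valid"]}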
@@ -1,5 +1,5 @@
from django.urls import path
-from plane.app.views import ApiTokenEndpoint
+from plane.app.views import ApiTokenEndpoint, ServiceApiTokenEndpoint

urlpatterns = [
    # API Tokens
@@ -13,5 +13,10 @@ urlpatterns = [
        ApiTokenEndpoint.as_view(),
        name="api-tokens",
    ),
+   path(
+       "workspaces/<str:slug>/service-api-tokens/",
+       ServiceApiTokenEndpoint.as_view(),
+       name="service-api-tokens",
+   ),
    ## End API Tokens
]
@@ -6,6 +6,8 @@ from plane.app.views import (
    CycleIssueViewSet,
    CycleDateCheckEndpoint,
    CycleFavoriteViewSet,
+   CycleProgressEndpoint,
+   CycleAnalyticsEndpoint,
    TransferCycleIssueEndpoint,
    CycleUserPropertiesEndpoint,
    CycleArchiveUnarchiveEndpoint,
@@ -106,4 +108,14 @@ urlpatterns = [
        CycleArchiveUnarchiveEndpoint.as_view(),
        name="cycle-archive-unarchive",
    ),
+   path(
+       "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/progress/",
+       CycleProgressEndpoint.as_view(),
+       name="project-cycle",
+   ),
+   path(
+       "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/analytics/",
+       CycleAnalyticsEndpoint.as_view(),
+       name="project-cycle",
+   ),
]
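Both new routes reuse the name "project-cycle". Django allows duplicate route names, but reverse() then resolves only the last pattern registered under that name, so the two URLs cannot be reliably distinguished by name (unique names such as "project-cycle-progress" would avoid that). For illustration, reversing inside a configured Django project:

    from uuid import uuid4
    from django.urls import reverse

    # With two patterns named "project-cycle", this resolves to whichever
    # was registered last (the analytics route, given the order above).
    url = reverse(
        "project-cycle",
        kwargs={"slug": "acme", "project_id": uuid4(), "cycle_id": uuid4()},
    )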
@@ -40,7 +40,7 @@ urlpatterns = [
        name="inbox-issue",
    ),
    path(
-       "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/<uuid:issue_id>/",
+       "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/<uuid:pk>/",
        InboxIssueViewSet.as_view(
            {
                "get": "retrieve",
@@ -19,8 +19,8 @@ from plane.app.views import (
    IssueUserDisplayPropertyEndpoint,
    IssueViewSet,
    LabelViewSet,
-   BulkIssueOperationsEndpoint,
    BulkArchiveIssuesEndpoint,
+   IssuePaginatedViewSet,
)

urlpatterns = [
@@ -39,6 +39,12 @@ urlpatterns = [
        ),
        name="project-issue",
    ),
+   # updated v1 paginated issues
+   path(
+       "workspaces/<str:slug>/projects/<uuid:project_id>/v2/issues/",
+       IssuePaginatedViewSet.as_view({"get": "list"}),
+       name="project-issues-paginated",
+   ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:pk>/",
        IssueViewSet.as_view(
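The new list-only route mounts IssuePaginatedViewSet beside the existing issue endpoints under a /v2/ path segment, leaving the old list route untouched. A hedged request sketch (the /api prefix and any pagination query parameters depend on configuration outside this diff):

    from uuid import uuid4
    from rest_framework.test import APIClient

    project_id = uuid4()  # placeholder
    client = APIClient()
    # client.force_authenticate(user=some_user)  # auth setup omitted
    response = client.get(f"/api/workspaces/acme/projects/{project_id}/v2/issues/")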
@@ -233,13 +239,13 @@ urlpatterns = [
        name="project-issue-comment-reactions",
    ),
    ## End Comment Reactions
-   ## IssueProperty
+   ## IssueUserProperty
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-properties/",
        IssueUserDisplayPropertyEndpoint.as_view(),
        name="project-issue-display-properties",
    ),
-   ## IssueProperty End
+   ## IssueUserProperty End
    ## Issue Archives
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/",
@@ -305,9 +311,4 @@ urlpatterns = [
        ),
        name="project-issue-draft",
    ),
-   path(
-       "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-operation-issues/",
-       BulkIssueOperationsEndpoint.as_view(),
-       name="bulk-operations-issues",
-   ),
]
@@ -7,6 +7,7 @@ from plane.app.views import (
    PageLogEndpoint,
    SubPagesEndpoint,
    PagesDescriptionViewSet,
+   PageVersionEndpoint,
)
@@ -65,6 +66,16 @@ urlpatterns = [
        ),
        name="project-pages-lock-unlock",
    ),
+   # private and public page
+   path(
+       "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/access/",
+       PageViewSet.as_view(
+           {
+               "post": "access",
+           }
+       ),
+       name="project-pages-access",
+   ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/transactions/",
        PageLogEndpoint.as_view(),
@@ -90,4 +101,14 @@ urlpatterns = [
        ),
        name="page-description",
    ),
+   path(
+       "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/versions/",
+       PageVersionEndpoint.as_view(),
+       name="page-versions",
+   ),
+   path(
+       "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/versions/<uuid:pk>/",
+       PageVersionEndpoint.as_view(),
+       name="page-versions",
+   ),
]
@@ -25,6 +25,8 @@ from plane.app.views import (
    ExportWorkspaceUserActivityEndpoint,
    WorkspaceModulesEndpoint,
    WorkspaceCyclesEndpoint,
+   WorkspaceFavoriteEndpoint,
+   WorkspaceFavoriteGroupEndpoint,
)
@@ -237,4 +239,19 @@ urlpatterns = [
        WorkspaceCyclesEndpoint.as_view(),
        name="workspace-cycles",
    ),
+   path(
+       "workspaces/<str:slug>/user-favorites/",
+       WorkspaceFavoriteEndpoint.as_view(),
+       name="workspace-user-favorites",
+   ),
+   path(
+       "workspaces/<str:slug>/user-favorites/<uuid:favorite_id>/",
+       WorkspaceFavoriteEndpoint.as_view(),
+       name="workspace-user-favorites",
+   ),
+   path(
+       "workspaces/<str:slug>/user-favorites/<uuid:favorite_id>/group/",
+       WorkspaceFavoriteGroupEndpoint.as_view(),
+       name="workspace-user-favorites-groups",
+   ),
]
@@ -40,6 +40,11 @@ from .workspace.base import (
    ExportWorkspaceUserActivityEndpoint,
)

+from .workspace.favorite import (
+   WorkspaceFavoriteEndpoint,
+   WorkspaceFavoriteGroupEndpoint,
+)
+
from .workspace.member import (
    WorkSpaceMemberViewSet,
    TeamMemberViewSet,
@@ -93,6 +98,8 @@ from .cycle.base import (
    CycleUserPropertiesEndpoint,
    CycleViewSet,
    TransferCycleIssueEndpoint,
+   CycleAnalyticsEndpoint,
+   CycleProgressEndpoint,
)
from .cycle.issue import (
    CycleIssueViewSet,
@@ -107,6 +114,7 @@ from .issue.base import (
    IssueViewSet,
    IssueUserDisplayPropertyEndpoint,
    BulkDeleteIssuesEndpoint,
+   IssuePaginatedViewSet,
)

from .issue.activity import (
@@ -151,9 +159,6 @@ from .issue.subscriber import (
    IssueSubscriberViewSet,
)


from .issue.bulk_operations import BulkIssueOperationsEndpoint

from .module.base import (
    ModuleViewSet,
    ModuleLinkViewSet,
@@ -169,8 +174,10 @@ from .module.archive import (
    ModuleArchiveUnarchiveEndpoint,
)

-from .api import ApiTokenEndpoint
+from .api import (
+   ApiTokenEndpoint,
+   ServiceApiTokenEndpoint,
+)

from .page.base import (
    PageViewSet,
@@ -179,6 +186,7 @@ from .page.base import (
    SubPagesEndpoint,
    PagesDescriptionViewSet,
)
+from .page.version import PageVersionEndpoint

from .search.base import GlobalSearchEndpoint
from .search.issue import IssueSearchEndpoint
@@ -7,22 +7,22 @@ from django.utils import timezone
from rest_framework import status
from rest_framework.response import Response

# Module imports
from plane.app.permissions import WorkSpaceAdminPermission
from plane.app.serializers import AnalyticViewSerializer

# Module imports
from plane.app.views.base import BaseAPIView, BaseViewSet
from plane.bgtasks.analytic_plot_export import analytic_export_task
from plane.db.models import AnalyticView, Issue, Workspace
from plane.utils.analytics_plot import build_graph_plot
from plane.utils.issue_filters import issue_filters
+from plane.app.permissions import allow_permission, ROLE


class AnalyticsEndpoint(BaseAPIView):
-   permission_classes = [
-       WorkSpaceAdminPermission,
-   ]
+   @allow_permission(
+       [ROLE.ADMIN, ROLE.MEMBER, ROLE.VIEWER], level="WORKSPACE"
+   )
    def get(self, request, slug):
        x_axis = request.GET.get("x_axis", False)
        y_axis = request.GET.get("y_axis", False)
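The pattern in this and the following hunks: class-level permission_classes (admin-only) gives way to a per-method @allow_permission(...) decorator that grants access to the listed roles at workspace level. Plane's actual decorator is not part of this diff; a simplified sketch of how such a decorator can be shaped, with get_role_for_request as an assumed helper:

    from functools import wraps
    from rest_framework import status
    from rest_framework.response import Response

    def allow_permission(allowed_roles, level="PROJECT"):
        # Illustrative reimplementation only, not Plane's code.
        def decorator(view_method):
            @wraps(view_method)
            def wrapped(self, request, *args, **kwargs):
                # get_role_for_request is an assumed helper resolving the
                # requesting user's role at the given level.
                role = get_role_for_request(request, level, **kwargs)
                if role not in allowed_roles:
                    return Response(status=status.HTTP_403_FORBIDDEN)
                return view_method(self, request, *args, **kwargs)
            return wrapped
        return decorator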
@@ -201,10 +201,10 @@ class AnalyticViewViewset(BaseViewSet):


class SavedAnalyticEndpoint(BaseAPIView):
-   permission_classes = [
-       WorkSpaceAdminPermission,
-   ]
+   @allow_permission(
+       [ROLE.ADMIN, ROLE.MEMBER, ROLE.VIEWER], level="WORKSPACE"
+   )
    def get(self, request, slug, analytic_id):
        analytic_view = AnalyticView.objects.get(
            pk=analytic_id, workspace__slug=slug
@@ -234,10 +234,10 @@ class SavedAnalyticEndpoint(BaseAPIView):


class ExportAnalyticsEndpoint(BaseAPIView):
-   permission_classes = [
-       WorkSpaceAdminPermission,
-   ]
+   @allow_permission(
+       [ROLE.ADMIN, ROLE.MEMBER, ROLE.VIEWER], level="WORKSPACE"
+   )
    def post(self, request, slug):
        x_axis = request.data.get("x_axis", False)
        y_axis = request.data.get("y_axis", False)
@@ -301,10 +301,10 @@ class ExportAnalyticsEndpoint(BaseAPIView):


class DefaultAnalyticsEndpoint(BaseAPIView):
-   permission_classes = [
-       WorkSpaceAdminPermission,
-   ]
+   @allow_permission(
+       [ROLE.ADMIN, ROLE.MEMBER, ROLE.VIEWER, ROLE.GUEST], level="WORKSPACE"
+   )
    def get(self, request, slug):
        filters = issue_filters(request.GET, "GET")
        base_issues = Issue.issue_objects.filter(
@@ -380,12 +380,10 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
            .order_by("-count")
        )

-       open_estimate_sum = open_issues_queryset.aggregate(
-           sum=Sum("point")
-       )["sum"]
-       total_estimate_sum = base_issues.aggregate(sum=Sum("point"))[
-           "sum"
-       ]
+       open_estimate_sum = open_issues_queryset.aggregate(sum=Sum("point"))[
+           "sum"
+       ]
+       total_estimate_sum = base_issues.aggregate(sum=Sum("point"))["sum"]

        return Response(
            {
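The reflow above is cosmetic, but callers of these sums inherit Django's usual aggregate() caveat: over an empty queryset the result dict holds None, not 0. A defensive variant of the same two lines, as a suggestion rather than what the diff ships:

    from django.db.models import Sum

    # aggregate() returns {"sum": None} for an empty queryset, so coalesce
    # before doing arithmetic with the totals.
    open_estimate_sum = open_issues_queryset.aggregate(sum=Sum("point"))["sum"] or 0
    total_estimate_sum = base_issues.aggregate(sum=Sum("point"))["sum"] or 0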
@@ -45,7 +45,7 @@ class ApiTokenEndpoint(BaseAPIView):
    def get(self, request, slug, pk=None):
        if pk is None:
            api_tokens = APIToken.objects.filter(
-               user=request.user, workspace__slug=slug
+               user=request.user, workspace__slug=slug, is_service=False
            )
            serializer = APITokenReadSerializer(api_tokens, many=True)
            return Response(serializer.data, status=status.HTTP_200_OK)
@@ -61,6 +61,7 @@ class ApiTokenEndpoint(BaseAPIView):
            workspace__slug=slug,
            user=request.user,
            pk=pk,
+           is_service=False,
        )
        api_token.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
@@ -78,3 +79,44 @@ class ApiTokenEndpoint(BaseAPIView):
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+
+class ServiceApiTokenEndpoint(BaseAPIView):
+   permission_classes = [
+       WorkspaceOwnerPermission,
+   ]
+
+   def post(self, request, slug):
+       workspace = Workspace.objects.get(slug=slug)
+
+       api_token = APIToken.objects.filter(
+           workspace=workspace,
+           is_service=True,
+       ).first()
+
+       if api_token:
+           return Response(
+               {
+                   "token": str(api_token.token),
+               },
+               status=status.HTTP_200_OK,
+           )
+       else:
+           # Check the user type
+           user_type = 1 if request.user.is_bot else 0
+
+           api_token = APIToken.objects.create(
+               label=str(uuid4().hex),
+               description="Service Token",
+               user=request.user,
+               workspace=workspace,
+               user_type=user_type,
+               is_service=True,
+           )
+           return Response(
+               {
+                   "token": str(api_token.token),
+               },
+               status=status.HTTP_201_CREATED,
+           )
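ServiceApiTokenEndpoint is effectively idempotent per workspace: it returns the existing service token with HTTP 200, or mints one and returns HTTP 201, with the same {"token": ...} body either way. A client-side sketch against the route added earlier in this diff (the host and the auth header are assumptions):

    import requests

    resp = requests.post(
        "https://plane.example.com/api/workspaces/acme/service-api-tokens/",
        headers={"X-API-Key": "<an-existing-api-token>"},  # assumed auth scheme
        timeout=10,
    )
    resp.raise_for_status()
    token = resp.json()["token"]  # returned for both 200 and 201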
Some files were not shown because too many files have changed in this diff.