Mirror of https://github.com/makeplane/plane
Synced: 2025-08-07 19:59:33 +00:00

Compare commits: feat-stick...fix-live-s (501 commits)
*(Commit table: 501 entries, newest `3e895da0e8` through oldest `9ed4591edc`. Only the SHA1 column survived in this mirror view; the Author and Date cells were empty.)*
```diff
@@ -38,3 +38,9 @@ USE_MINIO=1
 
 # Nginx Configuration
 NGINX_PORT=80
+
+# Force HTTPS for handling SSL Termination
+MINIO_ENDPOINT_SSL=0
+
+# API key rate limit
+API_KEY_RATE_LIMIT="60/minute"
```
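The new `API_KEY_RATE_LIMIT` entry follows the common `count/period` convention. As a rough illustration only (the helper below is hypothetical, not Plane's actual throttle code), such a value can be decoded like this:

```ts
// Hypothetical helper, not from the Plane codebase: decode a rate string
// such as "60/minute" into a request count and a window in milliseconds.
const WINDOW_MS: Record<string, number> = {
  second: 1_000,
  minute: 60_000,
  hour: 3_600_000,
  day: 86_400_000,
};

function parseRateLimit(rate: string): { limit: number; windowMs: number } {
  const [count, period] = rate.split("/");
  const limit = Number(count);
  const windowMs = WINDOW_MS[period];
  if (!Number.isInteger(limit) || windowMs === undefined) {
    throw new Error(`Unrecognized rate limit format: ${rate}`);
  }
  return { limit, windowMs };
}

console.log(parseRateLimit("60/minute")); // { limit: 60, windowMs: 60000 }
```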
**.github/actions/build-push-ce/action.yml** (vendored, 126 changed lines)
```diff
@@ -1,126 +0,0 @@
-name: "Build and Push Docker Image"
-description: "Reusable action for building and pushing Docker images"
-inputs:
-  docker-username:
-    description: "The Dockerhub username"
-    required: true
-  docker-token:
-    description: "The Dockerhub Token"
-    required: true
-
-  # Docker Image Options
-  docker-image-owner:
-    description: "The owner of the Docker image"
-    required: true
-  docker-image-name:
-    description: "The name of the Docker image"
-    required: true
-  build-context:
-    description: "The build context"
-    required: true
-    default: "."
-  dockerfile-path:
-    description: "The path to the Dockerfile"
-    required: true
-  build-args:
-    description: "The build arguments"
-    required: false
-    default: ""
-
-  # Buildx Options
-  buildx-driver:
-    description: "Buildx driver"
-    required: true
-    default: "docker-container"
-  buildx-version:
-    description: "Buildx version"
-    required: true
-    default: "latest"
-  buildx-platforms:
-    description: "Buildx platforms"
-    required: true
-    default: "linux/amd64"
-  buildx-endpoint:
-    description: "Buildx endpoint"
-    required: true
-    default: "default"
-
-  # Release Build Options
-  build-release:
-    description: "Flag to publish release"
-    required: false
-    default: "false"
-  build-prerelease:
-    description: "Flag to publish prerelease"
-    required: false
-    default: "false"
-  release-version:
-    description: "The release version"
-    required: false
-    default: "latest"
-
-runs:
-  using: "composite"
-  steps:
-    - name: Set Docker Tag
-      shell: bash
-      env:
-        IMG_OWNER: ${{ inputs.docker-image-owner }}
-        IMG_NAME: ${{ inputs.docker-image-name }}
-        BUILD_RELEASE: ${{ inputs.build-release }}
-        IS_PRERELEASE: ${{ inputs.build-prerelease }}
-        REL_VERSION: ${{ inputs.release-version }}
-      run: |
-        FLAT_BRANCH_VERSION=$(echo "${{ github.ref_name }}" | sed 's/[^a-zA-Z0-9.-]//g')
-
-        if [ "${{ env.BUILD_RELEASE }}" == "true" ]; then
-          semver_regex="^v([0-9]+)\.([0-9]+)\.([0-9]+)(-[a-zA-Z0-9]+(-[a-zA-Z0-9]+)*)?$"
-          if [[ ! ${{ env.REL_VERSION }} =~ $semver_regex ]]; then
-            echo "Invalid Release Version Format : ${{ env.REL_VERSION }}"
-            echo "Please provide a valid SemVer version"
-            echo "e.g. v1.2.3 or v1.2.3-alpha-1"
-            echo "Exiting the build process"
-            exit 1 # Exit with status 1 to fail the step
-          fi
-
-          TAG=${{ env.IMG_OWNER }}/${{ env.IMG_NAME }}:${{ env.REL_VERSION }}
-
-          if [ "${{ env.IS_PRERELEASE }}" != "true" ]; then
-            TAG=${TAG},${{ env.IMG_OWNER }}/${{ env.IMG_NAME }}:stable
-          fi
-        elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
-          TAG=${{ env.IMG_OWNER }}/${{ env.IMG_NAME }}:latest
-        else
-          TAG=${{ env.IMG_OWNER }}/${{ env.IMG_NAME }}:${FLAT_BRANCH_VERSION}
-        fi
-
-        echo "DOCKER_TAGS=${TAG}" >> $GITHUB_ENV
-    - name: Login to Docker Hub
-      uses: docker/login-action@v3
-      with:
-        username: ${{ inputs.docker-username }}
-        password: ${{ inputs.docker-token}}
-
-    - name: Set up Docker Buildx
-      uses: docker/setup-buildx-action@v3
-      with:
-        driver: ${{ inputs.buildx-driver }}
-        version: ${{ inputs.buildx-version }}
-        endpoint: ${{ inputs.buildx-endpoint }}
-
-    - name: Check out the repo
-      uses: actions/checkout@v4
-
-    - name: Build and Push Docker Image
-      uses: docker/build-push-action@v5.1.0
-      with:
-        context: ${{ inputs.build-context }}
-        file: ${{ inputs.dockerfile-path }}
-        platforms: ${{ inputs.buildx-platforms }}
-        tags: ${{ env.DOCKER_TAGS }}
-        push: true
-        build-args: ${{ inputs.build-args }}
-      env:
-        DOCKER_BUILDKIT: 1
-        DOCKER_USERNAME: ${{ inputs.docker-username }}
-        DOCKER_PASSWORD: ${{ inputs.docker-token }}
```
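The removed "Set Docker Tag" step gated releases behind the shell-side SemVer regex above. For reference, the same pattern behaves identically in TypeScript; this is an illustrative sketch only, not code from the replacement `makeplane/actions/build-push` action (whose source lives outside this repo):

```ts
// The pattern below is copied verbatim from the removed "Set Docker Tag" step.
const SEMVER_REGEX = /^v([0-9]+)\.([0-9]+)\.([0-9]+)(-[a-zA-Z0-9]+(-[a-zA-Z0-9]+)*)?$/;

const isValidReleaseVersion = (version: string): boolean => SEMVER_REGEX.test(version);

console.log(isValidReleaseVersion("v1.2.3"));         // true
console.log(isValidReleaseVersion("v1.2.3-alpha-1")); // true
console.log(isValidReleaseVersion("1.2.3"));          // false: missing the leading "v"
```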
**.github/workflows/build-aio-branch.yml** (vendored, 4 changed lines)
```diff
@@ -88,7 +88,7 @@ jobs:
 
   full_build_push:
     if: ${{ needs.branch_build_setup.outputs.do_full_build == 'true' }}
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     needs: [branch_build_setup]
     env:
       BUILD_TYPE: full
@@ -148,7 +148,7 @@ jobs:
 
   slim_build_push:
     if: ${{ needs.branch_build_setup.outputs.do_slim_build == 'true' }}
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     needs: [branch_build_setup]
     env:
       BUILD_TYPE: slim
```
**.github/workflows/build-branch.yml** (vendored, 153 changed lines)
```diff
@@ -25,6 +25,10 @@ on:
       required: false
       default: false
       type: boolean
+  push:
+    branches:
+      - preview
+      - canary
 
 env:
   TARGET_BRANCH: ${{ github.ref_name }}
@@ -36,19 +40,13 @@ env:
 jobs:
   branch_build_setup:
     name: Build Setup
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     outputs:
       gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
       gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
       gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
       gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
       gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
-      build_proxy: ${{ steps.changed_files.outputs.proxy_any_changed }}
-      build_apiserver: ${{ steps.changed_files.outputs.apiserver_any_changed }}
-      build_admin: ${{ steps.changed_files.outputs.admin_any_changed }}
-      build_space: ${{ steps.changed_files.outputs.space_any_changed }}
-      build_web: ${{ steps.changed_files.outputs.web_any_changed }}
-      build_live: ${{ steps.changed_files.outputs.live_any_changed }}
 
       dh_img_web: ${{ steps.set_env_variables.outputs.DH_IMG_WEB }}
       dh_img_space: ${{ steps.set_env_variables.outputs.DH_IMG_SPACE }}
@@ -119,61 +117,19 @@ jobs:
         name: Checkout Files
         uses: actions/checkout@v4
 
-      - name: Get changed files
-        id: changed_files
-        uses: tj-actions/changed-files@v42
-        with:
-          files_yaml: |
-            apiserver:
-              - apiserver/**
-            proxy:
-              - nginx/**
-            admin:
-              - admin/**
-              - packages/**
-              - "package.json"
-              - "yarn.lock"
-              - "tsconfig.json"
-              - "turbo.json"
-            space:
-              - space/**
-              - packages/**
-              - "package.json"
-              - "yarn.lock"
-              - "tsconfig.json"
-              - "turbo.json"
-            web:
-              - web/**
-              - packages/**
-              - "package.json"
-              - "yarn.lock"
-              - "tsconfig.json"
-              - "turbo.json"
-            live:
-              - live/**
-              - packages/**
-              - 'package.json'
-              - 'yarn.lock'
-              - 'tsconfig.json'
-              - 'turbo.json'
 
   branch_build_push_admin:
     if: ${{ needs.branch_build_setup.outputs.build_admin == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
     name: Build-Push Admin Docker Image
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
    needs: [branch_build_setup]
    steps:
      - id: checkout_files
        name: Checkout Files
        uses: actions/checkout@v4
      - name: Admin Build and Push
-        uses: ./.github/actions/build-push-ce
+        uses: makeplane/actions/build-push@v1.0.0
        with:
          build-release: ${{ needs.branch_build_setup.outputs.build_release }}
          build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
          release-version: ${{ needs.branch_build_setup.outputs.release_version }}
-          docker-username: ${{ secrets.DOCKERHUB_USERNAME }}
-          docker-token: ${{ secrets.DOCKERHUB_TOKEN }}
+          dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
+          dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
          docker-image-owner: makeplane
          docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_admin }}
          build-context: .
@@ -184,22 +140,18 @@ jobs:
       buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
 
   branch_build_push_web:
     if: ${{ needs.branch_build_setup.outputs.build_web == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
     name: Build-Push Web Docker Image
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
    needs: [branch_build_setup]
    steps:
      - id: checkout_files
        name: Checkout Files
        uses: actions/checkout@v4
      - name: Web Build and Push
-        uses: ./.github/actions/build-push-ce
+        uses: makeplane/actions/build-push@v1.0.0
        with:
          build-release: ${{ needs.branch_build_setup.outputs.build_release }}
          build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
          release-version: ${{ needs.branch_build_setup.outputs.release_version }}
-          docker-username: ${{ secrets.DOCKERHUB_USERNAME }}
-          docker-token: ${{ secrets.DOCKERHUB_TOKEN }}
+          dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
+          dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
          docker-image-owner: makeplane
          docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_web }}
          build-context: .
@@ -210,22 +162,18 @@ jobs:
       buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
 
   branch_build_push_space:
     if: ${{ needs.branch_build_setup.outputs.build_space == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
     name: Build-Push Space Docker Image
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
    needs: [branch_build_setup]
    steps:
      - id: checkout_files
        name: Checkout Files
        uses: actions/checkout@v4
      - name: Space Build and Push
-        uses: ./.github/actions/build-push-ce
+        uses: makeplane/actions/build-push@v1.0.0
        with:
          build-release: ${{ needs.branch_build_setup.outputs.build_release }}
          build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
          release-version: ${{ needs.branch_build_setup.outputs.release_version }}
-          docker-username: ${{ secrets.DOCKERHUB_USERNAME }}
-          docker-token: ${{ secrets.DOCKERHUB_TOKEN }}
+          dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
+          dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
          docker-image-owner: makeplane
          docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_space }}
          build-context: .
@@ -236,22 +184,18 @@ jobs:
       buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
 
   branch_build_push_live:
     if: ${{ needs.branch_build_setup.outputs.build_live == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
     name: Build-Push Live Collaboration Docker Image
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
    needs: [branch_build_setup]
    steps:
      - id: checkout_files
        name: Checkout Files
        uses: actions/checkout@v4
      - name: Live Build and Push
-        uses: ./.github/actions/build-push-ce
+        uses: makeplane/actions/build-push@v1.0.0
        with:
          build-release: ${{ needs.branch_build_setup.outputs.build_release }}
          build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
          release-version: ${{ needs.branch_build_setup.outputs.release_version }}
-          docker-username: ${{ secrets.DOCKERHUB_USERNAME }}
-          docker-token: ${{ secrets.DOCKERHUB_TOKEN }}
+          dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
+          dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
          docker-image-owner: makeplane
          docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_live }}
          build-context: .
@@ -262,22 +206,18 @@ jobs:
       buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
 
   branch_build_push_apiserver:
     if: ${{ needs.branch_build_setup.outputs.build_apiserver == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
     name: Build-Push API Server Docker Image
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
    needs: [branch_build_setup]
    steps:
      - id: checkout_files
        name: Checkout Files
        uses: actions/checkout@v4
      - name: Backend Build and Push
-        uses: ./.github/actions/build-push-ce
+        uses: makeplane/actions/build-push@v1.0.0
        with:
          build-release: ${{ needs.branch_build_setup.outputs.build_release }}
          build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
          release-version: ${{ needs.branch_build_setup.outputs.release_version }}
-          docker-username: ${{ secrets.DOCKERHUB_USERNAME }}
-          docker-token: ${{ secrets.DOCKERHUB_TOKEN }}
+          dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
+          dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
          docker-image-owner: makeplane
          docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_backend }}
          build-context: ./apiserver
@@ -288,22 +228,18 @@ jobs:
       buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
 
   branch_build_push_proxy:
     if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
     name: Build-Push Proxy Docker Image
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
    needs: [branch_build_setup]
    steps:
      - id: checkout_files
        name: Checkout Files
        uses: actions/checkout@v4
      - name: Proxy Build and Push
-        uses: ./.github/actions/build-push-ce
+        uses: makeplane/actions/build-push@v1.0.0
        with:
          build-release: ${{ needs.branch_build_setup.outputs.build_release }}
          build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
          release-version: ${{ needs.branch_build_setup.outputs.release_version }}
-          docker-username: ${{ secrets.DOCKERHUB_USERNAME }}
-          docker-token: ${{ secrets.DOCKERHUB_TOKEN }}
+          dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
+          dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
          docker-image-owner: makeplane
          docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_proxy }}
          build-context: ./nginx
@@ -313,35 +249,10 @@ jobs:
       buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
       buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
 
-  attach_assets_to_build:
-    if: ${{ needs.branch_build_setup.outputs.build_type == 'Release' }}
-    name: Attach Assets to Release
-    runs-on: ubuntu-20.04
-    needs: [branch_build_setup]
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-
-      - name: Update Assets
-        run: |
-          cp ./deploy/selfhost/install.sh deploy/selfhost/setup.sh
-
-      - name: Attach Assets
-        id: attach_assets
-        uses: actions/upload-artifact@v4
-        with:
-          name: selfhost-assets
-          retention-days: 2
-          path: |
-            ${{ github.workspace }}/deploy/selfhost/setup.sh
-            ${{ github.workspace }}/deploy/selfhost/restore.sh
-            ${{ github.workspace }}/deploy/selfhost/docker-compose.yml
-            ${{ github.workspace }}/deploy/selfhost/variables.env
 
   publish_release:
     if: ${{ needs.branch_build_setup.outputs.build_type == 'Release' }}
     name: Build Release
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     needs:
       [
         branch_build_setup,
@@ -351,7 +262,6 @@ jobs:
         branch_build_push_live,
         branch_build_push_apiserver,
         branch_build_push_proxy,
-        attach_assets_to_build,
       ]
     env:
       REL_VERSION: ${{ needs.branch_build_setup.outputs.release_version }}
@@ -362,6 +272,8 @@ jobs:
       - name: Update Assets
         run: |
           cp ./deploy/selfhost/install.sh deploy/selfhost/setup.sh
+          sed -i 's/${APP_RELEASE:-stable}/${APP_RELEASE:-'${REL_VERSION}'}/g' deploy/selfhost/docker-compose.yml
+          sed -i 's/APP_RELEASE=stable/APP_RELEASE='${REL_VERSION}'/g' deploy/selfhost/variables.env
 
       - name: Create Release
         id: create_release
@@ -376,6 +288,7 @@ jobs:
           generate_release_notes: true
          files: |
            ${{ github.workspace }}/deploy/selfhost/setup.sh
+            ${{ github.workspace }}/deploy/selfhost/swarm.sh
            ${{ github.workspace }}/deploy/selfhost/restore.sh
            ${{ github.workspace }}/deploy/selfhost/docker-compose.yml
            ${{ github.workspace }}/deploy/selfhost/variables.env
```
**.github/workflows/build-test-pull-request.yml** (vendored, 63 changed lines)
```diff
@@ -6,49 +6,9 @@ on:
   types: ["opened", "synchronize", "ready_for_review"]
 
 jobs:
-  get-changed-files:
+  lint-apiserver:
     if: github.event.pull_request.draft == false
     runs-on: ubuntu-latest
-    outputs:
-      apiserver_changed: ${{ steps.changed-files.outputs.apiserver_any_changed }}
-      admin_changed: ${{ steps.changed-files.outputs.admin_any_changed }}
-      space_changed: ${{ steps.changed-files.outputs.space_any_changed }}
-      web_changed: ${{ steps.changed-files.outputs.web_any_changed }}
-    steps:
-      - uses: actions/checkout@v4
-      - name: Get changed files
-        id: changed-files
-        uses: tj-actions/changed-files@v44
-        with:
-          files_yaml: |
-            apiserver:
-              - apiserver/**
-            admin:
-              - admin/**
-              - packages/**
-              - 'package.json'
-              - 'yarn.lock'
-              - 'tsconfig.json'
-              - 'turbo.json'
-            space:
-              - space/**
-              - packages/**
-              - 'package.json'
-              - 'yarn.lock'
-              - 'tsconfig.json'
-              - 'turbo.json'
-            web:
-              - web/**
-              - packages/**
-              - 'package.json'
-              - 'yarn.lock'
-              - 'tsconfig.json'
-              - 'turbo.json'
-
-  lint-apiserver:
-    needs: get-changed-files
-    runs-on: ubuntu-latest
-    if: needs.get-changed-files.outputs.apiserver_changed == 'true'
     steps:
       - uses: actions/checkout@v4
       - name: Set up Python
@@ -63,41 +23,38 @@ jobs:
         run: ruff check --fix apiserver
 
   lint-admin:
-    needs: get-changed-files
-    if: needs.get-changed-files.outputs.admin_changed == 'true'
+    if: github.event.pull_request.draft == false
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
       - name: Setup Node.js
         uses: actions/setup-node@v4
         with:
-          node-version: 18.x
+          node-version: 20.x
       - run: yarn install
       - run: yarn lint --filter=admin
 
   lint-space:
-    needs: get-changed-files
-    if: needs.get-changed-files.outputs.space_changed == 'true'
+    if: github.event.pull_request.draft == false
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
       - name: Setup Node.js
         uses: actions/setup-node@v4
         with:
-          node-version: 18.x
+          node-version: 20.x
       - run: yarn install
       - run: yarn lint --filter=space
 
   lint-web:
-    needs: get-changed-files
-    if: needs.get-changed-files.outputs.web_changed == 'true'
+    if: github.event.pull_request.draft == false
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
       - name: Setup Node.js
         uses: actions/setup-node@v4
         with:
-          node-version: 18.x
+          node-version: 20.x
       - run: yarn install
       - run: yarn lint --filter=web
 
@@ -109,7 +66,7 @@ jobs:
 
       - name: Setup Node.js
         uses: actions/setup-node@v4
         with:
-          node-version: 18.x
+          node-version: 20.x
       - run: yarn install
       - run: yarn build --filter=admin
 
@@ -121,7 +78,7 @@ jobs:
 
       - name: Setup Node.js
         uses: actions/setup-node@v4
         with:
-          node-version: 18.x
+          node-version: 20.x
       - run: yarn install
       - run: yarn build --filter=space
 
@@ -133,6 +90,6 @@ jobs:
       - name: Setup Node.js
         uses: actions/setup-node@v4
         with:
-          node-version: 18.x
+          node-version: 20.x
       - run: yarn install
       - run: yarn build --filter=web
```
**.github/workflows/feature-deployment.yml** (vendored, 2 changed lines)
```diff
@@ -51,7 +51,7 @@ jobs:
         uses: actions/checkout@v4
 
   full_build_push:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     needs: [branch_build_setup]
     env:
       BUILD_TYPE: full
```
**.github/workflows/sync-repo.yml** (vendored, 2 changed lines)
```diff
@@ -11,7 +11,7 @@ env:
 
 jobs:
   sync_changes:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     permissions:
       pull-requests: write
       contents: read
```
**.gitignore** (vendored, 7 changed lines)
```diff
@@ -78,10 +78,17 @@ pnpm-workspace.yaml
 .npmrc
 .secrets
 tmp/
 
 ## packages
 dist
 .temp/
 deploy/selfhost/plane-app/
 
+## Storybook
+*storybook.log
+output.css
+
 dev-editor
+# Redis
+*.rdb
+*.rdb.gz
```
**CONTRIBUTING.md** (134 changed lines)
````diff
@@ -62,17 +62,143 @@ To ensure consistency throughout the source code, please keep these rules in mind:
 - All features or bug fixes must be tested by one or more specs (unit-tests).
 - We use [Eslint default rule guide](https://eslint.org/docs/rules/), with minor changes. An automated formatter is available using prettier.
 
-## Need help? Questions and suggestions
-
-Questions, suggestions, and thoughts are most welcome. We can also be reached in our [Discord Server](https://discord.com/invite/A92xrEGCge).
-
 ## Ways to contribute
 
 - Try Plane Cloud and the self hosting platform and give feedback
 - Add new integrations
+- Add or update translations
 - Help with open [issues](https://github.com/makeplane/plane/issues) or [create your own](https://github.com/makeplane/plane/issues/new/choose)
 - Share your thoughts and suggestions with us
 - Help create tutorials and blog posts
 - Request a feature by submitting a proposal
 - Report a bug
 - **Improve documentation** - fix incomplete or missing [docs](https://docs.plane.so/), bad wording, examples or explanations.
 
+## Contributing to language support
+
+This guide is designed to help contributors understand how to add or update translations in the application.
+
+### Understanding translation structure
+
+#### File organization
+
+Translations are organized by language in the locales directory. Each language has its own folder containing JSON files for translations. Here's how it looks:
+
+```
+packages/i18n/src/locales/
+├── en/
+│   ├── core.json    # Critical translations
+│   └── translations.json
+├── fr/
+│   └── translations.json
+└── [language]/
+    └── translations.json
+```
+
+#### Nested structure
+
+To keep translations organized, we use a nested structure for keys. This makes it easier to manage and locate specific translations. For example:
+
+```json
+{
+  "issue": {
+    "label": "Work item",
+    "title": {
+      "label": "Work item title"
+    }
+  }
+}
+```
````
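A note on the nested layout shown above: it implies that translations are addressed by dotted key paths such as `issue.title.label`. The resolver below is a hypothetical sketch for illustration, not code from Plane's i18n package:

```ts
// Hypothetical illustration: walk a dotted key path through a nested
// translations object, returning undefined if any segment is missing.
type Translations = { [key: string]: string | Translations };

function resolveKey(translations: Translations, path: string): string | undefined {
  let node: string | Translations | undefined = translations;
  for (const segment of path.split(".")) {
    if (typeof node !== "object" || node === undefined) return undefined;
    node = node[segment];
  }
  return typeof node === "string" ? node : undefined;
}

const en = { issue: { label: "Work item", title: { label: "Work item title" } } };
console.log(resolveKey(en, "issue.title.label")); // "Work item title"
```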
````diff
+### Translation formatting guide
+
+We use [IntlMessageFormat](https://formatjs.github.io/docs/intl-messageformat/) to handle dynamic content, such as variables and pluralization. Here's how to format your translations:
+
+#### Examples
+
+- **Simple variables**
+
+  ```json
+  {
+    "greeting": "Hello, {name}!"
+  }
+  ```
+
+- **Pluralization**
+
+  ```json
+  {
+    "items": "{count, plural, one {Work item} other {Work items}}"
+  }
+  ```
````
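For reference, the ICU strings above can be exercised directly with the [IntlMessageFormat](https://formatjs.github.io/docs/intl-messageformat/) library the guide links to. A minimal sketch, assuming the `intl-messageformat` package is installed:

```ts
import { IntlMessageFormat } from "intl-messageformat";

// Compile the pluralized ICU message from the example above.
const items = new IntlMessageFormat(
  "{count, plural, one {Work item} other {Work items}}",
  "en"
);

console.log(items.format({ count: 1 })); // "Work item"
console.log(items.format({ count: 5 })); // "Work items"
```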
````diff
+### Contributing guidelines
+
+#### Updating existing translations
+
+1. Locate the key in `locales/<language>/translations.json`.
+2. Update the value while ensuring the key structure remains intact.
+3. Preserve any existing ICU formats (e.g., variables, pluralization).
+
+#### Adding new translation keys
+
+1. When introducing a new key, ensure it is added to **all** language files, even if translations are not immediately available. Use English as a placeholder if needed.
+2. Keep the nesting structure consistent across all languages.
+3. If the new key requires dynamic content (e.g., variables or pluralization), ensure the ICU format is applied uniformly across all languages.
+
+### Adding new languages
+
+Adding a new language involves several steps to ensure it integrates seamlessly with the project. Follow these instructions carefully:
+
+1. **Update type definitions**
+   Add the new language to the TLanguage type in the language definitions file:
+
+   ```typescript
+   // types/language.ts
+   export type TLanguage = "en" | "fr" | "your-lang";
+   ```
+
+2. **Add language configuration**
+   Include the new language in the list of supported languages:
+
+   ```typescript
+   // constants/language.ts
+   export const SUPPORTED_LANGUAGES: ILanguageOption[] = [
+     { label: "English", value: "en" },
+     { label: "Your Language", value: "your-lang" }
+   ];
+   ```
+
+3. **Create translation files**
+   1. Create a new folder for your language under locales (e.g., `locales/your-lang/`).
+   2. Add a `translations.json` file inside the folder.
+   3. Copy the structure from an existing translation file and translate all keys.
+
+4. **Update import logic**
+   Modify the language import logic to include your new language:
+
+   ```typescript
+   private importLanguageFile(language: TLanguage): Promise<any> {
+     switch (language) {
+       case "your-lang":
+         return import("../locales/your-lang/translations.json");
+       // ...
+     }
+   }
+   ```
+
+### Quality checklist
+
+Before submitting your contribution, please ensure the following:
+
+- All translation keys exist in every language file.
+- Nested structures match across all language files.
+- ICU message formats are correctly implemented.
+- All languages load without errors in the application.
+- Dynamic values and pluralization work as expected.
+- There are no missing or untranslated keys.
+
+#### Pro tips
+
+- When in doubt, refer to the English translations for context.
+- Verify pluralization works with different numbers.
+- Ensure dynamic values (e.g., `{name}`) are correctly interpolated.
+- Double-check that nested key access paths are accurate.
+
+Happy translating! 🌍✨
+
+## Need help? Questions and suggestions
+
+Questions, suggestions, and thoughts are most welcome. We can also be reached in our [Discord Server](https://discord.com/invite/A92xrEGCge).
````
```diff
@@ -43,9 +43,6 @@ NGINX_PORT=80
 # Debug value for api server use it as 0 for production use
 DEBUG=0
 CORS_ALLOWED_ORIGINS="http://localhost"
-# Error logs
-SENTRY_DSN=""
-SENTRY_ENVIRONMENT="development"
 # Database Settings
 POSTGRES_USER="plane"
 POSTGRES_PASSWORD="plane"
```
**README.md** (19 changed lines)
```diff
@@ -6,6 +6,7 @@
   </a>
 </p>
 <h1 align="center"><b>Plane</b></h1>
+<p align="center"><b>Open-source project management that unlocks customer value</b></p>
 
 <p align="center">
   <a href="https://discord.com/invite/A92xrEGCge">
@@ -15,10 +16,10 @@
 </p>
 
 <p align="center">
-  <a href="https://dub.sh/plane-website-readme"><b>Website</b></a> •
-  <a href="https://git.new/releases"><b>Releases</b></a> •
-  <a href="https://dub.sh/planepowershq"><b>Twitter</b></a> •
-  <a href="https://dub.sh/planedocs"><b>Documentation</b></a>
+  <a href="https://plane.so/"><b>Website</b></a> •
+  <a href="https://github.com/makeplane/plane/releases"><b>Releases</b></a> •
+  <a href="https://twitter.com/planepowers"><b>Twitter</b></a> •
+  <a href="https://docs.plane.so/"><b>Documentation</b></a>
 </p>
 
 <p>
@@ -38,7 +39,7 @@
   </a>
 </p>
 
-Meet [Plane](https://dub.sh/plane-website-readme), an open-source project management tool to track issues, run ~sprints~ cycles, and manage product roadmaps without the chaos of managing the tool itself. 🧘♀️
+Meet [Plane](https://plane.so/), an open-source project management tool to track issues, run ~sprints~ cycles, and manage product roadmaps without the chaos of managing the tool itself. 🧘♀️
 
 > Plane is evolving every day. Your suggestions, ideas, and reported bugs help us immensely. Do not hesitate to join in the conversation on [Discord](https://discord.com/invite/A92xrEGCge) or raise a GitHub issue. We read everything and respond to most.
 
@@ -57,7 +58,7 @@ Prefer full control over your data and infrastructure? Install and run Plane on
 | Docker | [](https://developers.plane.so/self-hosting/methods/docker-compose) |
 | Kubernetes | [](https://developers.plane.so/self-hosting/methods/kubernetes) |
 
-`Instance admins` can manage and customize settings using [God mode](https://developers.plane.so/self-hosting/govern/instance-admin).
+`Instance admins` can configure instance settings with [God mode](https://developers.plane.so/self-hosting/govern/instance-admin).
 
 ## 🌟 Features
 
@@ -117,9 +118,9 @@ Setting up your local environment is simple and straightforward. Follow these steps:
 
 That’s it! You’re all set to begin coding. Remember to refresh your browser if changes don’t auto-reload. Happy contributing! 🎉
 
-## Built with
-[](https://nextjs.org/)<br/>
-[](https://www.djangoproject.com/)<br/>
+## ⚙️ Built with
+[](https://nextjs.org/)
+[](https://www.djangoproject.com/)
+[](https://nodejs.org/en)
 
 ## 📸 Screenshots
```
```diff
@@ -2,7 +2,4 @@ module.exports = {
   root: true,
   extends: ["@plane/eslint-config/next.js"],
-  parser: "@typescript-eslint/parser",
-  parserOptions: {
-    project: true,
-  },
 };
```
```diff
@@ -1,7 +1,9 @@
+FROM node:20-alpine as base
+
 # *****************************************************************************
 # STAGE 1: Build the project
 # *****************************************************************************
-FROM node:18-alpine AS builder
+FROM base AS builder
 RUN apk add --no-cache libc6-compat
 WORKDIR /app
 
@@ -13,7 +15,7 @@ RUN turbo prune --scope=admin --docker
 # *****************************************************************************
 # STAGE 2: Install dependencies & build the project
 # *****************************************************************************
-FROM node:18-alpine AS installer
+FROM base AS installer
 
 RUN apk add --no-cache libc6-compat
 WORKDIR /app
@@ -52,7 +54,7 @@ RUN yarn turbo run build --filter=admin
 # *****************************************************************************
 # STAGE 3: Copy the project and start it
 # *****************************************************************************
-FROM node:18-alpine AS runner
+FROM base AS runner
 WORKDIR /app
 
 COPY --from=installer /app/admin/next.config.js .
```
```diff
@@ -1,4 +1,4 @@
-FROM node:18-alpine
+FROM node:20-alpine
 RUN apk add --no-cache libc6-compat
 # Set working directory
 WORKDIR /app
```
```diff
@@ -43,6 +43,7 @@ export const InstanceGithubConfigForm: FC<Props> = (props) => {
     defaultValues: {
       GITHUB_CLIENT_ID: config["GITHUB_CLIENT_ID"],
       GITHUB_CLIENT_SECRET: config["GITHUB_CLIENT_SECRET"],
+      GITHUB_ORGANIZATION_ID: config["GITHUB_ORGANIZATION_ID"],
     },
   });
 
@@ -93,6 +94,19 @@ export const InstanceGithubConfigForm: FC<Props> = (props) => {
       error: Boolean(errors.GITHUB_CLIENT_SECRET),
       required: true,
     },
+    {
+      key: "GITHUB_ORGANIZATION_ID",
+      type: "text",
+      label: "Organization ID",
+      description: (
+        <>
+          The organization github ID.
+        </>
+      ),
+      placeholder: "123456789",
+      error: Boolean(errors.GITHUB_ORGANIZATION_ID),
+      required: false,
+    },
   ];
 
   const GITHUB_SERVICE_FIELD: TCopyField[] = [
@@ -150,6 +164,7 @@ export const InstanceGithubConfigForm: FC<Props> = (props) => {
       reset({
         GITHUB_CLIENT_ID: response.find((item) => item.key === "GITHUB_CLIENT_ID")?.value,
         GITHUB_CLIENT_SECRET: response.find((item) => item.key === "GITHUB_CLIENT_SECRET")?.value,
+        GITHUB_ORGANIZATION_ID: response.find((item) => item.key === "GITHUB_ORGANIZATION_ID")?.value,
       });
     })
     .catch((err) => console.error(err));
```
```diff
@@ -1,9 +1,9 @@
 import React, { FC, useEffect, useState } from "react";
 import { Dialog, Transition } from "@headlessui/react";
+// plane imports
+import { InstanceService } from "@plane/services";
 // ui
 import { Button, Input } from "@plane/ui";
-// services
-import { InstanceService } from "@/services/instance.service";
 
 type Props = {
   isOpen: boolean;
```
```diff
@@ -123,7 +123,7 @@ export const GeneralConfigurationForm: FC<IGeneralConfigurationForm> = observer(
             No PII is collected.This anonymized data is used to understand how you use Plane and build new features
             in line with{" "}
             <a
-              href="https://docs.plane.so/self-hosting/telemetry"
+              href="https://developers.plane.so/self-hosting/telemetry"
               target="_blank"
               className="text-custom-primary-100 hover:underline"
               rel="noreferrer"
```
```diff
@@ -7,15 +7,15 @@ import { DefaultLayout } from "@/layouts/default-layout";
 export const metadata: Metadata = {
   title: "Plane | Simple, extensible, open-source project management tool.",
   description:
-    "Open-source project management tool to manage issues, sprints, and product roadmaps with peace of mind.",
+    "Open-source project management tool to manage work items, sprints, and product roadmaps with peace of mind.",
   openGraph: {
     title: "Plane | Simple, extensible, open-source project management tool.",
     description:
-      "Open-source project management tool to manage issues, sprints, and product roadmaps with peace of mind.",
+      "Open-source project management tool to manage work items, sprints, and product roadmaps with peace of mind.",
     url: "https://plane.so/",
   },
   keywords:
-    "software development, customer feedback, software, accelerate, code management, release management, project management, issue tracking, agile, scrum, kanban, collaboration",
+    "software development, customer feedback, software, accelerate, code management, release management, project management, work items tracking, agile, scrum, kanban, collaboration",
   twitter: {
     site: "@planepowers",
   },
```
```diff
@@ -2,18 +2,16 @@ import { useState, useEffect } from "react";
 import Link from "next/link";
 import { useRouter } from "next/navigation";
 import { Controller, useForm } from "react-hook-form";
-// constants
+// plane imports
 import { WEB_BASE_URL, ORGANIZATION_SIZE, RESTRICTED_URLS } from "@plane/constants";
-// types
+import { InstanceWorkspaceService } from "@plane/services";
 import { IWorkspace } from "@plane/types";
 // components
 import { Button, CustomSelect, getButtonStyling, Input, setToast, TOAST_TYPE } from "@plane/ui";
 // hooks
 import { useWorkspace } from "@/hooks/store";
-// services
-import { WorkspaceService } from "@/services/workspace.service";
 
-const workspaceService = new WorkspaceService();
+const instanceWorkspaceService = new InstanceWorkspaceService();
 
 export const WorkspaceCreateForm = () => {
   // router
@@ -40,8 +38,8 @@ export const WorkspaceCreateForm = () => {
   const workspaceBaseURL = encodeURI(WEB_BASE_URL || window.location.origin + "/");
 
   const handleCreateWorkspace = async (formData: IWorkspace) => {
-    await workspaceService
-      .workspaceSlugCheck(formData.slug)
+    await instanceWorkspaceService
+      .slugCheck(formData.slug)
       .then(async (res) => {
         if (res.status === true && !RESTRICTED_URLS.includes(formData.slug)) {
           setSlugError(false);
```
```diff
@@ -7,12 +7,11 @@ import { LogOut, UserCog2, Palette } from "lucide-react";
 import { Menu, Transition } from "@headlessui/react";
 // plane internal packages
 import { API_BASE_URL } from "@plane/constants";
+import {AuthService } from "@plane/services";
 import { Avatar } from "@plane/ui";
 import { getFileURL, cn } from "@plane/utils";
 // hooks
 import { useTheme, useUser } from "@/hooks/store";
-// services
-import { AuthService } from "@/services/auth.service";
 
 // service initialization
 const authService = new AuthService();
```
```diff
@@ -6,12 +6,11 @@ import { useSearchParams } from "next/navigation";
 import { Eye, EyeOff } from "lucide-react";
 // plane internal packages
 import { API_BASE_URL, E_PASSWORD_STRENGTH } from "@plane/constants";
+import { AuthService } from "@plane/services";
 import { Button, Checkbox, Input, Spinner } from "@plane/ui";
 import { getPasswordStrength } from "@plane/utils";
 // components
 import { Banner, PasswordStrengthMeter } from "@/components/common";
-// services
-import { AuthService } from "@/services/auth.service";
 
 // service initialization
 const authService = new AuthService();
@@ -337,7 +336,7 @@ export const InstanceSetupForm: FC = (props) => {
               </label>
               <a
                 tabIndex={-1}
-                href="https://docs.plane.so/telemetry"
+                href="https://developers.plane.so/self-hosting/telemetry"
                 target="_blank"
                 rel="noopener noreferrer"
                 className="text-sm font-medium text-blue-500 hover:text-blue-600"
```
```diff
@@ -5,13 +5,12 @@ import { useSearchParams } from "next/navigation";
 import { Eye, EyeOff } from "lucide-react";
 // plane internal packages
 import { API_BASE_URL, EAdminAuthErrorCodes, TAuthErrorInfo } from "@plane/constants";
+import { AuthService } from "@plane/services";
 import { Button, Input, Spinner } from "@plane/ui";
 // components
 import { Banner } from "@/components/common";
 // helpers
 import { authErrorHandler } from "@/lib/auth-helpers";
-// services
-import { AuthService } from "@/services/auth.service";
 // local components
 import { AuthBanner } from "../authentication";
```
```diff
@@ -1,53 +0,0 @@
-import axios, { AxiosInstance, AxiosRequestConfig, AxiosResponse } from "axios";
-// store
-// import { rootStore } from "@/lib/store-context";
-
-export abstract class APIService {
-  protected baseURL: string;
-  private axiosInstance: AxiosInstance;
-
-  constructor(baseURL: string) {
-    this.baseURL = baseURL;
-    this.axiosInstance = axios.create({
-      baseURL,
-      withCredentials: true,
-    });
-
-    this.setupInterceptors();
-  }
-
-  private setupInterceptors() {
-    // this.axiosInstance.interceptors.response.use(
-    //   (response) => response,
-    //   (error) => {
-    //     const store = rootStore;
-    //     if (error.response && error.response.status === 401 && store.user.currentUser) store.user.reset();
-    //     return Promise.reject(error);
-    //   }
-    // );
-  }
-
-  get<ResponseType>(url: string, params = {}): Promise<AxiosResponse<ResponseType>> {
-    return this.axiosInstance.get(url, { params });
-  }
-
-  post<RequestType, ResponseType>(url: string, data: RequestType, config = {}): Promise<AxiosResponse<ResponseType>> {
-    return this.axiosInstance.post(url, data, config);
-  }
-
-  put<RequestType, ResponseType>(url: string, data: RequestType, config = {}): Promise<AxiosResponse<ResponseType>> {
-    return this.axiosInstance.put(url, data, config);
-  }
-
-  patch<RequestType, ResponseType>(url: string, data: RequestType, config = {}): Promise<AxiosResponse<ResponseType>> {
-    return this.axiosInstance.patch(url, data, config);
-  }
-
-  delete<RequestType>(url: string, data?: RequestType, config = {}) {
-    return this.axiosInstance.delete(url, { data, ...config });
-  }
-
-  request<T>(config: AxiosRequestConfig = {}): Promise<AxiosResponse<T>> {
-    return this.axiosInstance(config);
-  }
-}
```
```diff
@@ -1,21 +0,0 @@
-import { API_BASE_URL } from "@plane/constants";
-// services
-import { APIService } from "@/services/api.service";
-
-type TCsrfTokenResponse = {
-  csrf_token: string;
-};
-
-export class AuthService extends APIService {
-  constructor() {
-    super(API_BASE_URL);
-  }
-
-  async requestCSRFToken(): Promise<TCsrfTokenResponse> {
-    return this.get<TCsrfTokenResponse>("/auth/get-csrf-token/")
-      .then((response) => response.data)
-      .catch((error) => {
-        throw error;
-      });
-  }
-}
```
```diff
@@ -1,72 +0,0 @@
-// plane internal packages
-import { API_BASE_URL } from "@plane/constants";
-import type {
-  IFormattedInstanceConfiguration,
-  IInstance,
-  IInstanceAdmin,
-  IInstanceConfiguration,
-  IInstanceInfo,
-} from "@plane/types";
-// helpers
-import { APIService } from "@/services/api.service";
-
-export class InstanceService extends APIService {
-  constructor() {
-    super(API_BASE_URL);
-  }
-
-  async getInstanceInfo(): Promise<IInstanceInfo> {
-    return this.get<IInstanceInfo>("/api/instances/")
-      .then((response) => response.data)
-      .catch((error) => {
-        throw error?.response?.data;
-      });
-  }
-
-  async getInstanceAdmins(): Promise<IInstanceAdmin[]> {
-    return this.get<IInstanceAdmin[]>("/api/instances/admins/")
-      .then((response) => response.data)
-      .catch((error) => {
-        throw error;
-      });
-  }
-
-  async updateInstanceInfo(data: Partial<IInstance>): Promise<IInstance> {
-    return this.patch<Partial<IInstance>, IInstance>("/api/instances/", data)
-      .then((response) => response?.data)
-      .catch((error) => {
-        throw error?.response?.data;
-      });
-  }
-
-  async getInstanceConfigurations() {
-    return this.get<IInstanceConfiguration[]>("/api/instances/configurations/")
-      .then((response) => response.data)
-      .catch((error) => {
-        throw error;
-      });
-  }
-
-  async updateInstanceConfigurations(
-    data: Partial<IFormattedInstanceConfiguration>
-  ): Promise<IInstanceConfiguration[]> {
-    return this.patch<Partial<IFormattedInstanceConfiguration>, IInstanceConfiguration[]>(
-      "/api/instances/configurations/",
-      data
-    )
-      .then((response) => response?.data)
-      .catch((error) => {
-        throw error?.response?.data;
-      });
-  }
-
-  async sendTestEmail(receiverEmail: string): Promise<undefined> {
-    return this.post<{ receiver_email: string }, undefined>("/api/instances/email-credentials-check/", {
-      receiver_email: receiverEmail,
-    })
-      .then((response) => response?.data)
-      .catch((error) => {
-        throw error?.response?.data;
-      });
-  }
-}
```
```diff
@@ -1,29 +0,0 @@
-// plane internal packages
-import { API_BASE_URL } from "@plane/constants";
-import type { IUser } from "@plane/types";
-// services
-import { APIService } from "@/services/api.service";
-
-interface IUserSession extends IUser {
-  isAuthenticated: boolean;
-}
-
-export class UserService extends APIService {
-  constructor() {
-    super(API_BASE_URL);
-  }
-
-  async authCheck(): Promise<IUserSession> {
-    return this.get<any>("/api/instances/admins/me/")
-      .then((response) => ({ ...response?.data, isAuthenticated: true }))
-      .catch(() => ({ isAuthenticated: false }));
-  }
-
-  async currentUser(): Promise<IUser> {
-    return this.get<IUser>("/api/instances/admins/me/")
-      .then((response) => response?.data)
-      .catch((error) => {
-        throw error?.response;
-      });
-  }
-}
```
```diff
@@ -1,52 +0,0 @@
-// plane internal packages
-import { API_BASE_URL } from "@plane/constants";
-import type { IWorkspace, TWorkspacePaginationInfo } from "@plane/types";
-// services
-import { APIService } from "@/services/api.service";
-
-export class WorkspaceService extends APIService {
-  constructor() {
-    super(API_BASE_URL);
-  }
-
-  /**
-   * @description Fetches all workspaces
-   * @returns Promise<TWorkspacePaginationInfo>
-   */
-  async getWorkspaces(nextPageCursor?: string): Promise<TWorkspacePaginationInfo> {
-    return this.get<TWorkspacePaginationInfo>("/api/instances/workspaces/", {
-      cursor: nextPageCursor,
-    })
-      .then((response) => response.data)
-      .catch((error) => {
-        throw error?.response?.data;
-      });
-  }
-
-  /**
-   * @description Checks if a slug is available
-   * @param slug - string
-   * @returns Promise<any>
-   */
-  async workspaceSlugCheck(slug: string): Promise<any> {
-    const params = new URLSearchParams({ slug });
-    return this.get(`/api/instances/workspace-slug-check/?${params.toString()}`)
-      .then((response) => response?.data)
-      .catch((error) => {
-        throw error?.response?.data;
-      });
-  }
-
-  /**
-   * @description Creates a new workspace
-   * @param data - IWorkspace
-   * @returns Promise<IWorkspace>
-   */
-  async createWorkspace(data: IWorkspace): Promise<IWorkspace> {
-    return this.post<IWorkspace, IWorkspace>("/api/instances/workspaces/", data)
-      .then((response) => response.data)
-      .catch((error) => {
-        throw error?.response?.data;
-      });
-  }
-}
```
@@ -2,6 +2,7 @@ import set from "lodash/set";
import { observable, action, computed, makeObservable, runInAction } from "mobx";
// plane internal packages
import { EInstanceStatus, TInstanceStatus } from "@plane/constants";
import { InstanceService } from "@plane/services";
import {
  IInstance,
  IInstanceAdmin,
@@ -10,8 +11,6 @@ import {
  IInstanceInfo,
  IInstanceConfig,
} from "@plane/types";
// services
import { InstanceService } from "@/services/instance.service";
// root store
import { CoreRootStore } from "@/store/root.store";

@@ -96,7 +95,7 @@ export class InstanceStore implements IInstanceStore {
    try {
      if (this.instance === undefined) this.isLoading = true;
      this.error = undefined;
      const instanceInfo = await this.instanceService.getInstanceInfo();
      const instanceInfo = await this.instanceService.info();
      // handling the new user popup toggle
      if (this.instance === undefined && !instanceInfo?.instance?.workspaces_exist)
        this.store.theme.toggleNewUserPopup();
@@ -125,7 +124,7 @@ export class InstanceStore implements IInstanceStore {
   */
  updateInstanceInfo = async (data: Partial<IInstance>) => {
    try {
      const instanceResponse = await this.instanceService.updateInstanceInfo(data);
      const instanceResponse = await this.instanceService.update(data);
      if (instanceResponse) {
        runInAction(() => {
          if (this.instance) set(this.instance, "instance", instanceResponse);
@@ -144,7 +143,7 @@ export class InstanceStore implements IInstanceStore {
   */
  fetchInstanceAdmins = async () => {
    try {
      const instanceAdmins = await this.instanceService.getInstanceAdmins();
      const instanceAdmins = await this.instanceService.admins();
      if (instanceAdmins) runInAction(() => (this.instanceAdmins = instanceAdmins));
      return instanceAdmins;
    } catch (error) {
@@ -159,7 +158,7 @@ export class InstanceStore implements IInstanceStore {
   */
  fetchInstanceConfigurations = async () => {
    try {
      const instanceConfigurations = await this.instanceService.getInstanceConfigurations();
      const instanceConfigurations = await this.instanceService.configurations();
      if (instanceConfigurations) runInAction(() => (this.instanceConfigurations = instanceConfigurations));
      return instanceConfigurations;
    } catch (error) {
@@ -174,7 +173,7 @@ export class InstanceStore implements IInstanceStore {
   */
  updateInstanceConfigurations = async (data: Partial<IFormattedInstanceConfiguration>) => {
    try {
      const response = await this.instanceService.updateInstanceConfigurations(data);
      const response = await this.instanceService.updateConfigurations(data);
      runInAction(() => {
        this.instanceConfigurations = this.instanceConfigurations?.map((config) => {
          const item = response.find((item) => item.key === config.key);
@@ -1,10 +1,8 @@
import { action, observable, runInAction, makeObservable } from "mobx";
// plane internal packages
import { EUserStatus, TUserStatus } from "@plane/constants";
import { AuthService, UserService } from "@plane/services";
import { IUser } from "@plane/types";
// services
import { AuthService } from "@/services/auth.service";
import { UserService } from "@/services/user.service";
// root store
import { CoreRootStore } from "@/store/root.store";

@@ -58,7 +56,7 @@ export class UserStore implements IUserStore {
  fetchCurrentUser = async () => {
    try {
      if (this.currentUser === undefined) this.isLoading = true;
      const currentUser = await this.userService.currentUser();
      const currentUser = await this.userService.adminDetails();
      if (currentUser) {
        await this.store.instance.fetchInstanceAdmins();
        runInAction(() => {
@@ -1,8 +1,8 @@
import set from "lodash/set";
import { action, observable, runInAction, makeObservable, computed } from "mobx";
// plane imports
import { InstanceWorkspaceService } from "@plane/services";
import { IWorkspace, TLoader, TPaginationInfo } from "@plane/types";
// services
import { WorkspaceService } from "@/services/workspace.service";
// root store
import { CoreRootStore } from "@/store/root.store";

@@ -29,7 +29,7 @@ export class WorkspaceStore implements IWorkspaceStore {
  workspaces: Record<string, IWorkspace> = {};
  paginationInfo: TPaginationInfo | undefined = undefined;
  // services
  workspaceService;
  instanceWorkspaceService;

  constructor(private store: CoreRootStore) {
    makeObservable(this, {
@@ -48,7 +48,7 @@ export class WorkspaceStore implements IWorkspaceStore {
      // crud actions
      createWorkspace: action,
    });
    this.workspaceService = new WorkspaceService();
    this.instanceWorkspaceService = new InstanceWorkspaceService();
  }

  // computed
@@ -84,7 +84,7 @@ export class WorkspaceStore implements IWorkspaceStore {
      } else {
        this.loader = "init-loader";
      }
      const paginatedWorkspaceData = await this.workspaceService.getWorkspaces();
      const paginatedWorkspaceData = await this.instanceWorkspaceService.list();
      runInAction(() => {
        const { results, ...paginationInfo } = paginatedWorkspaceData;
        results.forEach((workspace: IWorkspace) => {
@@ -109,7 +109,7 @@ export class WorkspaceStore implements IWorkspaceStore {
    if (!this.paginationInfo || this.paginationInfo.next_page_results === false) return [];
    try {
      this.loader = "pagination";
      const paginatedWorkspaceData = await this.workspaceService.getWorkspaces(this.paginationInfo.next_cursor);
      const paginatedWorkspaceData = await this.instanceWorkspaceService.list(this.paginationInfo.next_cursor);
      runInAction(() => {
        const { results, ...paginationInfo } = paginatedWorkspaceData;
        results.forEach((workspace: IWorkspace) => {
@@ -135,7 +135,7 @@ export class WorkspaceStore implements IWorkspaceStore {
  createWorkspace = async (data: IWorkspace): Promise<IWorkspace> => {
    try {
      this.loader = "mutation";
      const workspace = await this.workspaceService.createWorkspace(data);
      const workspace = await this.instanceWorkspaceService.create(data);
      runInAction(() => {
        set(this.workspaces, [workspace.id], workspace);
      });
@@ -9,6 +9,19 @@ const nextConfig = {
    unoptimized: true,
  },
  basePath: process.env.NEXT_PUBLIC_ADMIN_BASE_PATH || "",
  transpilePackages: [
    "@plane/constants",
    "@plane/editor",
    "@plane/hooks",
    "@plane/i18n",
    "@plane/logger",
    "@plane/propel",
    "@plane/services",
    "@plane/shared-state",
    "@plane/types",
    "@plane/ui",
    "@plane/utils",
  ],
};

module.exports = nextConfig;
@@ -1,6 +1,8 @@
{
  "name": "admin",
  "version": "0.24.1",
  "description": "Admin UI for Plane",
  "version": "0.25.3",
  "license": "AGPL-3.0",
  "private": true,
  "scripts": {
    "dev": "turbo run develop",
@@ -18,35 +20,34 @@
    "@plane/types": "*",
    "@plane/ui": "*",
    "@plane/utils": "*",
    "@sentry/nextjs": "^8.32.0",
    "@plane/services": "*",
    "@tailwindcss/typography": "^0.5.9",
    "@types/lodash": "^4.17.0",
    "autoprefixer": "10.4.14",
    "axios": "^1.7.4",
    "axios": "^1.8.3",
    "lodash": "^4.17.21",
    "lucide-react": "^0.356.0",
    "lucide-react": "^0.469.0",
    "mobx": "^6.12.0",
    "mobx-react": "^9.1.1",
    "next": "^14.2.20",
    "next": "^14.2.26",
    "next-themes": "^0.2.1",
    "postcss": "^8.4.38",
    "react": "^18.3.1",
    "react-dom": "^18.3.1",
    "react-hook-form": "7.51.5",
    "swr": "^2.2.4",
    "tailwindcss": "3.3.2",
    "uuid": "^9.0.1",
    "zxcvbn": "^4.4.2"
  },
  "devDependencies": {
    "@plane/eslint-config": "*",
    "@plane/tailwind-config": "*",
    "@plane/typescript-config": "*",
    "@types/node": "18.16.1",
    "@types/react": "^18.3.11",
    "@types/react-dom": "^18.2.18",
    "@types/uuid": "^9.0.8",
    "@types/zxcvbn": "^4.4.4",
    "tailwind-config-custom": "*",
    "typescript": "5.3.3"
  }
}
@@ -1,4 +1,5 @@
const sharedConfig = require("tailwind-config-custom/tailwind.config.js");
/* eslint-disable @typescript-eslint/no-require-imports */
const sharedConfig = require("@plane/tailwind-config/tailwind.config.js");

module.exports = {
  presets: [sharedConfig],
@@ -145,11 +145,8 @@ RUN chmod +x /app/pg-setup.sh
# APPLICATION ENVIRONMENT SETTINGS
# *****************************************************************************
ENV APP_DOMAIN=localhost

ENV WEB_URL=http://${APP_DOMAIN}
ENV DEBUG=0
ENV SENTRY_DSN=
ENV SENTRY_ENVIRONMENT=production
ENV CORS_ALLOWED_ORIGINS=http://${APP_DOMAIN},https://${APP_DOMAIN}
# Secret Key
ENV SECRET_KEY=60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5
@@ -3,10 +3,6 @@
DEBUG=0
CORS_ALLOWED_ORIGINS="http://localhost"

# Error logs
SENTRY_DSN=""
SENTRY_ENVIRONMENT="development"

# Database Settings
POSTGRES_USER="plane"
POSTGRES_PASSWORD="plane"
@@ -59,4 +55,10 @@ APP_BASE_URL=


# Hard delete files after days
HARD_DELETE_AFTER_DAYS=60
HARD_DELETE_AFTER_DAYS=60

# Force HTTPS for handling SSL Termination
MINIO_ENDPOINT_SSL=0

# API key rate limit
API_KEY_RATE_LIMIT="60/minute"
@@ -1,4 +1,7 @@
{
  "name": "plane-api",
  "version": "0.24.1"
  "version": "0.25.3",
  "license": "AGPL-3.0",
  "private": true,
  "description": "API server powering Plane's backend"
}
@@ -1,9 +1,13 @@
# python imports
import os

# Third party imports
from rest_framework.throttling import SimpleRateThrottle


class ApiKeyRateThrottle(SimpleRateThrottle):
    scope = "api_key"
    rate = "60/minute"
    rate = os.environ.get("API_KEY_RATE_LIMIT", "60/minute")

    def get_cache_key(self, request, view):
        # Retrieve the API key from the request header
@@ -15,3 +15,4 @@ from .state import StateLiteSerializer, StateSerializer
from .cycle import CycleSerializer, CycleIssueSerializer, CycleLiteSerializer
from .module import ModuleSerializer, ModuleIssueSerializer, ModuleLiteSerializer
from .intake import IntakeIssueSerializer
from .estimate import EstimatePointSerializer
@@ -72,6 +72,7 @@ class BaseSerializer(serializers.ModelSerializer):
            StateLiteSerializer,
            UserLiteSerializer,
            WorkspaceLiteSerializer,
            EstimatePointSerializer,
        )

        # Expansion mapper
@@ -88,6 +89,7 @@ class BaseSerializer(serializers.ModelSerializer):
            "owned_by": UserLiteSerializer,
            "members": UserLiteSerializer,
            "parent": IssueLiteSerializer,
            "estimate_point": EstimatePointSerializer,
        }
        # Check if field in expansion then expand the field
        if expand in expansion:
@@ -1,4 +1,5 @@
# Third party imports
import pytz
from rest_framework import serializers

# Module imports
@@ -6,6 +7,7 @@ from .base import BaseSerializer
from plane.db.models import Cycle, CycleIssue
from plane.utils.timezone_converter import convert_to_utc


class CycleSerializer(BaseSerializer):
    total_issues = serializers.IntegerField(read_only=True)
    cancelled_issues = serializers.IntegerField(read_only=True)
@@ -17,6 +19,14 @@
    completed_estimates = serializers.FloatField(read_only=True)
    started_estimates = serializers.FloatField(read_only=True)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        project = self.context.get("project")
        if project and project.timezone:
            project_timezone = pytz.timezone(project.timezone)
            self.fields["start_date"].timezone = project_timezone
            self.fields["end_date"].timezone = project_timezone

    def validate(self, data):
        if (
            data.get("start_date", None) is not None
@@ -30,11 +40,20 @@ class CycleSerializer(BaseSerializer):
            and data.get("end_date", None) is not None
        ):
            project_id = self.initial_data.get("project_id") or self.instance.project_id
            is_start_date_end_date_equal = (
                True
                if str(data.get("start_date")) == str(data.get("end_date"))
                else False
            )
            data["start_date"] = convert_to_utc(
                str(data.get("start_date").date()), project_id, is_start_date=True
                date=str(data.get("start_date").date()),
                project_id=project_id,
                is_start_date=True,
            )
            data["end_date"] = convert_to_utc(
                str(data.get("end_date", None).date()), project_id
                date=str(data.get("end_date", None).date()),
                project_id=project_id,
                is_start_date_end_date_equal=is_start_date_end_date_equal,
            )
        return data
10
apiserver/plane/api/serializers/estimate.py
Normal file
@@ -0,0 +1,10 @@
# Module imports
from plane.db.models import EstimatePoint
from .base import BaseSerializer


class EstimatePointSerializer(BaseSerializer):
    class Meta:
        model = EstimatePoint
        fields = ["id", "value"]
        read_only_fields = fields
@@ -1,6 +1,7 @@
# Django imports
from django.utils import timezone
from lxml import html
from django.db import IntegrityError

# Third party imports
from rest_framework import serializers
@@ -79,6 +80,7 @@ class IssueSerializer(BaseSerializer):
            data["assignees"] = ProjectMember.objects.filter(
                project_id=self.context.get("project_id"),
                is_active=True,
                role__gte=15,
                member_id__in=data["assignees"],
            ).values_list("member_id", flat=True)

@@ -138,47 +140,61 @@ class IssueSerializer(BaseSerializer):
        updated_by_id = issue.updated_by_id

        if assignees is not None and len(assignees):
            IssueAssignee.objects.bulk_create(
                [
                    IssueAssignee(
                        assignee_id=assignee_id,
            try:
                IssueAssignee.objects.bulk_create(
                    [
                        IssueAssignee(
                            assignee_id=assignee_id,
                            issue=issue,
                            project_id=project_id,
                            workspace_id=workspace_id,
                            created_by_id=created_by_id,
                            updated_by_id=updated_by_id,
                        )
                        for assignee_id in assignees
                    ],
                    batch_size=10,
                )
            except IntegrityError:
                pass
        else:
            try:
                # Then assign it to default assignee, if it is a valid assignee
                if default_assignee_id is not None and ProjectMember.objects.filter(
                    member_id=default_assignee_id,
                    project_id=project_id,
                    role__gte=15,
                    is_active=True
                ).exists():
                    IssueAssignee.objects.create(
                        assignee_id=default_assignee_id,
                        issue=issue,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for assignee_id in assignees
                ],
                batch_size=10,
            )
            else:
                # Then assign it to default assignee
                if default_assignee_id is not None:
                    IssueAssignee.objects.create(
                        assignee_id=default_assignee_id,
                        issue=issue,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
            except IntegrityError:
                pass

        if labels is not None and len(labels):
            IssueLabel.objects.bulk_create(
                [
                    IssueLabel(
                        label_id=label_id,
                        issue=issue,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for label_id in labels
                ],
                batch_size=10,
            )
            try:
                IssueLabel.objects.bulk_create(
                    [
                        IssueLabel(
                            label_id=label_id,
                            issue=issue,
                            project_id=project_id,
                            workspace_id=workspace_id,
                            created_by_id=created_by_id,
                            updated_by_id=updated_by_id,
                        )
                        for label_id in labels
                    ],
                    batch_size=10,
                )
            except IntegrityError:
                pass

        return issue
@@ -194,37 +210,45 @@ class IssueSerializer(BaseSerializer):

        if assignees is not None:
            IssueAssignee.objects.filter(issue=instance).delete()
            IssueAssignee.objects.bulk_create(
                [
                    IssueAssignee(
                        assignee_id=assignee_id,
                        issue=instance,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for assignee_id in assignees
                ],
                batch_size=10,
            )
            try:
                IssueAssignee.objects.bulk_create(
                    [
                        IssueAssignee(
                            assignee_id=assignee_id,
                            issue=instance,
                            project_id=project_id,
                            workspace_id=workspace_id,
                            created_by_id=created_by_id,
                            updated_by_id=updated_by_id,
                        )
                        for assignee_id in assignees
                    ],
                    batch_size=10,
                    ignore_conflicts=True,
                )
            except IntegrityError:
                pass

        if labels is not None:
            IssueLabel.objects.filter(issue=instance).delete()
            IssueLabel.objects.bulk_create(
                [
                    IssueLabel(
                        label_id=label_id,
                        issue=instance,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for label_id in labels
                ],
                batch_size=10,
            )
            try:
                IssueLabel.objects.bulk_create(
                    [
                        IssueLabel(
                            label_id=label_id,
                            issue=instance,
                            project_id=project_id,
                            workspace_id=workspace_id,
                            created_by_id=created_by_id,
                            updated_by_id=updated_by_id,
                        )
                        for label_id in labels
                    ],
                    batch_size=10,
                    ignore_conflicts=True,
                )
            except IntegrityError:
                pass

        # Timestamp update occurs even when other related models are updated
        instance.updated_at = timezone.now()
@@ -71,4 +71,9 @@ urlpatterns = [
        IssueAttachmentEndpoint.as_view(),
        name="attachment",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/<uuid:pk>/",
        IssueAttachmentEndpoint.as_view(),
        name="issue-attachment",
    ),
]
@@ -39,7 +39,7 @@ from plane.db.models import (
    UserFavorite,
)
from plane.utils.analytics_plot import burndown_plot

from plane.utils.host import base_host
from .base import BaseAPIView
from plane.bgtasks.webhook_task import model_activity

@@ -137,10 +137,12 @@ class CycleAPIEndpoint(BaseAPIView):
        )

    def get(self, request, slug, project_id, pk=None):
        project = Project.objects.get(workspace__slug=slug, pk=project_id)
        if pk:
            queryset = self.get_queryset().filter(archived_at__isnull=True).get(pk=pk)
            data = CycleSerializer(
                queryset, fields=self.fields, expand=self.expand
                queryset, fields=self.fields,
                expand=self.expand, context={"project": project}
            ).data
            return Response(data, status=status.HTTP_200_OK)
        queryset = self.get_queryset().filter(archived_at__isnull=True)
@@ -152,7 +154,8 @@ class CycleAPIEndpoint(BaseAPIView):
                start_date__lte=timezone.now(), end_date__gte=timezone.now()
            )
            data = CycleSerializer(
                queryset, many=True, fields=self.fields, expand=self.expand
                queryset, many=True, fields=self.fields,
                expand=self.expand, context={"project": project}
            ).data
            return Response(data, status=status.HTTP_200_OK)

@@ -163,7 +166,8 @@ class CycleAPIEndpoint(BaseAPIView):
                request=request,
                queryset=(queryset),
                on_results=lambda cycles: CycleSerializer(
                    cycles, many=True, fields=self.fields, expand=self.expand
                    cycles, many=True, fields=self.fields,
                    expand=self.expand, context={"project": project}
                ).data,
            )

@@ -174,7 +178,8 @@ class CycleAPIEndpoint(BaseAPIView):
                request=request,
                queryset=(queryset),
                on_results=lambda cycles: CycleSerializer(
                    cycles, many=True, fields=self.fields, expand=self.expand
                    cycles, many=True, fields=self.fields,
                    expand=self.expand, context={"project": project}
                ).data,
            )

@@ -185,7 +190,8 @@ class CycleAPIEndpoint(BaseAPIView):
                request=request,
                queryset=(queryset),
                on_results=lambda cycles: CycleSerializer(
                    cycles, many=True, fields=self.fields, expand=self.expand
                    cycles, many=True, fields=self.fields,
                    expand=self.expand, context={"project": project}
                ).data,
            )

@@ -198,14 +204,16 @@
                request=request,
                queryset=(queryset),
                on_results=lambda cycles: CycleSerializer(
                    cycles, many=True, fields=self.fields, expand=self.expand
                    cycles, many=True, fields=self.fields,
                    expand=self.expand, context={"project": project}
                ).data,
            )
        return self.paginate(
            request=request,
            queryset=(queryset),
            on_results=lambda cycles: CycleSerializer(
                cycles, many=True, fields=self.fields, expand=self.expand
                cycles, many=True, fields=self.fields,
                expand=self.expand, context={"project": project}
            ).data,
        )

@@ -251,7 +259,7 @@ class CycleAPIEndpoint(BaseAPIView):
            current_instance=None,
            actor_id=request.user.id,
            slug=slug,
            origin=request.META.get("HTTP_ORIGIN"),
            origin=base_host(request=request, is_app=True),
        )
        return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -323,7 +331,7 @@ class CycleAPIEndpoint(BaseAPIView):
            current_instance=current_instance,
            actor_id=request.user.id,
            slug=slug,
            origin=request.META.get("HTTP_ORIGIN"),
            origin=base_host(request=request, is_app=True),
        )
        return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -694,7 +702,7 @@ class CycleIssueAPIEndpoint(BaseAPIView):
            ),
            epoch=int(timezone.now().timestamp()),
            notification=True,
            origin=request.META.get("HTTP_ORIGIN"),
            origin=base_host(request=request, is_app=True),
        )
        # Return all Cycle Issues
        return Response(
@@ -1168,7 +1176,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
            ),
            epoch=int(timezone.now().timestamp()),
            notification=True,
            origin=request.META.get("HTTP_ORIGIN"),
            origin=base_host(request=request, is_app=True),
        )

        return Response({"message": "Success"}, status=status.HTTP_200_OK)
@@ -18,7 +18,7 @@ from plane.api.serializers import IntakeIssueSerializer, IssueSerializer
from plane.app.permissions import ProjectLitePermission
from plane.bgtasks.issue_activities_task import issue_activity
from plane.db.models import Intake, IntakeIssue, Issue, Project, ProjectMember, State

from plane.utils.host import base_host
from .base import BaseAPIView


@@ -109,16 +109,6 @@ class IntakeIssueAPIEndpoint(BaseAPIView):
                {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
            )

        # Create or get state
        state, _ = State.objects.get_or_create(
            name="Triage",
            group="triage",
            description="Default state for managing all Intake Issues",
            project_id=project_id,
            color="#ff7700",
            is_triage=True,
        )

        # create an issue
        issue = Issue.objects.create(
            name=request.data.get("issue", {}).get("name"),
@@ -128,7 +118,6 @@ class IntakeIssueAPIEndpoint(BaseAPIView):
            ),
            priority=request.data.get("issue", {}).get("priority", "none"),
            project_id=project_id,
            state=state,
        )

        # create an intake issue
@@ -308,7 +297,7 @@ class IntakeIssueAPIEndpoint(BaseAPIView):
            current_instance=current_instance,
            epoch=int(timezone.now().timestamp()),
            notification=False,
            origin=request.META.get("HTTP_ORIGIN"),
            origin=base_host(request=request, is_app=True),
            intake=str(intake_issue.id),
        )
@@ -1,9 +1,10 @@
# Python imports
import json

from django.core.serializers.json import DjangoJSONEncoder
import uuid

# Django imports
from django.core.serializers.json import DjangoJSONEncoder
from django.http import HttpResponseRedirect
from django.db import IntegrityError
from django.db.models import (
    Case,
@@ -19,11 +20,11 @@ from django.db.models import (
    Subquery,
)
from django.utils import timezone
from django.conf import settings

# Third party imports
from rest_framework import status
from rest_framework.response import Response
from rest_framework.parsers import MultiPartParser, FormParser

# Module imports
from plane.api.serializers import (
@@ -50,9 +51,12 @@ from plane.db.models import (
    Project,
    ProjectMember,
    CycleIssue,
    Workspace,
)

from plane.settings.storage import S3Storage
from plane.bgtasks.storage_metadata_task import get_asset_object_metadata
from .base import BaseAPIView
from plane.utils.host import base_host


class WorkspaceIssueAPIEndpoint(BaseAPIView):
@@ -940,37 +944,162 @@ class IssueAttachmentEndpoint(BaseAPIView):
    serializer_class = IssueAttachmentSerializer
    permission_classes = [ProjectEntityPermission]
    model = FileAsset
    parser_classes = (MultiPartParser, FormParser)

    def post(self, request, slug, project_id, issue_id):
        serializer = IssueAttachmentSerializer(data=request.data)
        name = request.data.get("name")
        type = request.data.get("type", False)
        size = request.data.get("size")
        external_id = request.data.get("external_id")
        external_source = request.data.get("external_source")

        # Check if the request is valid
        if not name or not size:
            return Response(
                {"error": "Invalid request.", "status": False},
                status=status.HTTP_400_BAD_REQUEST,
            )

        size_limit = min(size, settings.FILE_SIZE_LIMIT)

        if not type or type not in settings.ATTACHMENT_MIME_TYPES:
            return Response(
                {"error": "Invalid file type.", "status": False},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Get the workspace
        workspace = Workspace.objects.get(slug=slug)

        # asset key
        asset_key = f"{workspace.id}/{uuid.uuid4().hex}-{name}"

        if (
            request.data.get("external_id")
            and request.data.get("external_source")
            and FileAsset.objects.filter(
                project_id=project_id,
                workspace__slug=slug,
                issue_id=issue_id,
                external_source=request.data.get("external_source"),
                external_id=request.data.get("external_id"),
                issue_id=issue_id,
                entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
            ).exists()
        ):
            issue_attachment = FileAsset.objects.filter(
                workspace__slug=slug,
            asset = FileAsset.objects.filter(
                project_id=project_id,
                external_id=request.data.get("external_id"),
                workspace__slug=slug,
                external_source=request.data.get("external_source"),
                external_id=request.data.get("external_id"),
                issue_id=issue_id,
                entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
            ).first()
            return Response(
                {
                    "error": "Issue attachment with the same external id and external source already exists",
                    "id": str(issue_attachment.id),
                    "error": "Issue with the same external id and external source already exists",
                    "id": str(asset.id),
                },
                status=status.HTTP_409_CONFLICT,
            )

        if serializer.is_valid():
            serializer.save(project_id=project_id, issue_id=issue_id)
        # Create a File Asset
        asset = FileAsset.objects.create(
            attributes={"name": name, "type": type, "size": size_limit},
            asset=asset_key,
            size=size_limit,
            workspace_id=workspace.id,
            created_by=request.user,
            issue_id=issue_id,
            project_id=project_id,
            entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
            external_id=external_id,
            external_source=external_source,
        )

        # Get the presigned URL
        storage = S3Storage(request=request)
        # Generate a presigned URL to share an S3 object
        presigned_url = storage.generate_presigned_post(
            object_name=asset_key, file_type=type, file_size=size_limit
        )
        # Return the presigned URL
        return Response(
            {
                "upload_data": presigned_url,
                "asset_id": str(asset.id),
                "attachment": IssueAttachmentSerializer(asset).data,
                "asset_url": asset.asset_url,
            },
            status=status.HTTP_200_OK,
        )

    def delete(self, request, slug, project_id, issue_id, pk):
        issue_attachment = FileAsset.objects.get(
            pk=pk, workspace__slug=slug, project_id=project_id
        )
        issue_attachment.is_deleted = True
        issue_attachment.deleted_at = timezone.now()
        issue_attachment.save()

        issue_activity.delay(
            type="attachment.activity.deleted",
            requested_data=None,
            actor_id=str(self.request.user.id),
            issue_id=str(issue_id),
            project_id=str(project_id),
            current_instance=None,
            epoch=int(timezone.now().timestamp()),
            notification=True,
            origin=base_host(request=request, is_app=True),
        )

        # Get the storage metadata
        if not issue_attachment.storage_metadata:
            get_asset_object_metadata.delay(str(issue_attachment.id))
        issue_attachment.save()
        return Response(status=status.HTTP_204_NO_CONTENT)

    def get(self, request, slug, project_id, issue_id, pk=None):
        if pk:
            # Get the asset
            asset = FileAsset.objects.get(
                id=pk, workspace__slug=slug, project_id=project_id
            )

            # Check if the asset is uploaded
            if not asset.is_uploaded:
                return Response(
                    {"error": "The asset is not uploaded.", "status": False},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            storage = S3Storage(request=request)
            presigned_url = storage.generate_presigned_url(
                object_name=asset.asset.name,
                disposition="attachment",
                filename=asset.attributes.get("name"),
            )
            return HttpResponseRedirect(presigned_url)

        # Get all the attachments
        issue_attachments = FileAsset.objects.filter(
            issue_id=issue_id,
            entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
            workspace__slug=slug,
            project_id=project_id,
            is_uploaded=True,
        )
        # Serialize the attachments
        serializer = IssueAttachmentSerializer(issue_attachments, many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)

    def patch(self, request, slug, project_id, issue_id, pk):
        issue_attachment = FileAsset.objects.get(
            pk=pk, workspace__slug=slug, project_id=project_id
        )
        serializer = IssueAttachmentSerializer(issue_attachment)

        # Send this activity only if the attachment is not uploaded before
        if not issue_attachment.is_uploaded:
            issue_activity.delay(
                type="attachment.activity.created",
                requested_data=None,
@@ -980,32 +1109,15 @@ class IssueAttachmentEndpoint(BaseAPIView):
                current_instance=json.dumps(serializer.data, cls=DjangoJSONEncoder),
                epoch=int(timezone.now().timestamp()),
                notification=True,
                origin=request.META.get("HTTP_ORIGIN"),
                origin=base_host(request=request, is_app=True),
            )
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def delete(self, request, slug, project_id, issue_id, pk):
        issue_attachment = FileAsset.objects.get(pk=pk)
        issue_attachment.asset.delete(save=False)
        issue_attachment.delete()
        issue_activity.delay(
            type="attachment.activity.deleted",
            requested_data=None,
            actor_id=str(self.request.user.id),
            issue_id=str(self.kwargs.get("issue_id", None)),
            project_id=str(self.kwargs.get("project_id", None)),
            current_instance=None,
            epoch=int(timezone.now().timestamp()),
            notification=True,
            origin=request.META.get("HTTP_ORIGIN"),
        )
        # Update the attachment
        issue_attachment.is_uploaded = True
        issue_attachment.created_by = request.user

        # Get the storage metadata
        if not issue_attachment.storage_metadata:
            get_asset_object_metadata.delay(str(issue_attachment.id))
        issue_attachment.save()
        return Response(status=status.HTTP_204_NO_CONTENT)

    def get(self, request, slug, project_id, issue_id):
        issue_attachments = FileAsset.objects.filter(
            issue_id=issue_id, workspace__slug=slug, project_id=project_id
        )
        serializer = IssueAttachmentSerializer(issue_attachments, many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)
@@ -33,6 +33,7 @@ from plane.db.models import (

from .base import BaseAPIView
from plane.bgtasks.webhook_task import model_activity
from plane.utils.host import base_host


class ModuleAPIEndpoint(BaseAPIView):
@@ -174,7 +175,7 @@ class ModuleAPIEndpoint(BaseAPIView):
            current_instance=None,
            actor_id=request.user.id,
            slug=slug,
            origin=request.META.get("HTTP_ORIGIN"),
            origin=base_host(request=request, is_app=True),
        )
        module = Module.objects.get(pk=serializer.data["id"])
        serializer = ModuleSerializer(module)
@@ -226,7 +227,7 @@ class ModuleAPIEndpoint(BaseAPIView):
            current_instance=current_instance,
            actor_id=request.user.id,
            slug=slug,
            origin=request.META.get("HTTP_ORIGIN"),
            origin=base_host(request=request, is_app=True),
        )

        return Response(serializer.data, status=status.HTTP_200_OK)
@@ -280,6 +281,7 @@ class ModuleAPIEndpoint(BaseAPIView):
            project_id=str(project_id),
            current_instance=json.dumps({"module_name": str(module.name)}),
            epoch=int(timezone.now().timestamp()),
            origin=base_host(request=request, is_app=True),
        )
        module.delete()
        # Delete the module issues
@@ -449,6 +451,7 @@ class ModuleIssueAPIEndpoint(BaseAPIView):
                }
            ),
            epoch=int(timezone.now().timestamp()),
            origin=base_host(request=request, is_app=True),
        )

        return Response(
@@ -28,9 +28,9 @@ from plane.db.models import (
    Workspace,
    UserFavorite,
)
from plane.bgtasks.webhook_task import model_activity
from plane.bgtasks.webhook_task import model_activity, webhook_activity
from .base import BaseAPIView

from plane.utils.host import base_host

class ProjectAPIEndpoint(BaseAPIView):
    """Project Endpoints to create, update, list, retrieve and delete endpoint"""
@@ -228,7 +228,7 @@ class ProjectAPIEndpoint(BaseAPIView):
            current_instance=None,
            actor_id=request.user.id,
            slug=slug,
            origin=request.META.get("HTTP_ORIGIN"),
            origin=base_host(request=request, is_app=True),
        )

        serializer = ProjectSerializer(project)
@@ -258,7 +258,9 @@ class ProjectAPIEndpoint(BaseAPIView):
            ProjectSerializer(project).data, cls=DjangoJSONEncoder
        )

        intake_view = request.data.get("inbox_view", project.intake_view)
        intake_view = request.data.get(
            "inbox_view", request.data.get("intake_view", project.intake_view)
        )

        if project.archived_at:
            return Response(
@@ -286,16 +288,6 @@ class ProjectAPIEndpoint(BaseAPIView):
                is_default=True,
            )

            # Create the triage state in Backlog group
            State.objects.get_or_create(
                name="Triage",
                group="triage",
                description="Default state for managing all Intake Issues",
                project_id=pk,
                color="#ff7700",
                is_triage=True,
            )

            project = self.get_queryset().filter(pk=serializer.data["id"]).first()

            model_activity.delay(
@@ -305,7 +297,7 @@ class ProjectAPIEndpoint(BaseAPIView):
                current_instance=current_instance,
                actor_id=request.user.id,
                slug=slug,
                origin=request.META.get("HTTP_ORIGIN"),
                origin=base_host(request=request, is_app=True),
            )

            serializer = ProjectSerializer(project)
@@ -334,6 +326,19 @@ class ProjectAPIEndpoint(BaseAPIView):
            entity_type="project", entity_identifier=pk, project_id=pk
        ).delete()
        project.delete()
        webhook_activity.delay(
            event="project",
            verb="deleted",
            field=None,
            old_value=None,
            new_value=None,
            actor_id=request.user.id,
            slug=slug,
            current_site=base_host(request=request, is_app=True),
            event_id=project.id,
            old_identifier=None,
            new_identifier=None,
        )
        return Response(status=status.HTTP_204_NO_CONTENT)
@@ -19,6 +19,10 @@ from .workspace import (
    WorkspaceMemberAdminSerializer,
    WorkspaceMemberMeSerializer,
    WorkspaceUserPropertiesSerializer,
    WorkspaceUserLinkSerializer,
    WorkspaceRecentVisitSerializer,
    WorkspaceHomePreferenceSerializer,
    StickySerializer,
)
from .project import (
    ProjectSerializer,
@@ -68,6 +72,8 @@ from .issue import (
    IssueReactionLiteSerializer,
    IssueAttachmentLiteSerializer,
    IssueLinkLiteSerializer,
    IssueVersionDetailSerializer,
    IssueDescriptionVersionDetailSerializer,
)

from .module import (
@@ -115,8 +121,6 @@ from .exporter import ExporterHistorySerializer

from .webhook import WebhookSerializer, WebhookLogSerializer

from .dashboard import DashboardSerializer, WidgetSerializer

from .favorite import UserFavoriteSerializer

from .draft import (
@@ -20,12 +20,25 @@ class CycleWriteSerializer(BaseSerializer):
            data.get("start_date", None) is not None
            and data.get("end_date", None) is not None
        ):
            project_id = self.initial_data.get("project_id") or self.instance.project_id
            project_id = (
                self.initial_data.get("project_id", None)
                or (self.instance and self.instance.project_id)
                or self.context.get("project_id", None)
            )
            is_start_date_end_date_equal = (
                True
                if str(data.get("start_date")) == str(data.get("end_date"))
                else False
            )
            data["start_date"] = convert_to_utc(
                str(data.get("start_date").date()), project_id, is_start_date=True
                date=str(data.get("start_date").date()),
                project_id=project_id,
                is_start_date=True,
            )
            data["end_date"] = convert_to_utc(
                str(data.get("end_date", None).date()), project_id
                date=str(data.get("end_date", None).date()),
                project_id=project_id,
                is_start_date_end_date_equal=is_start_date_end_date_equal,
            )
        return data
@@ -1,21 +0,0 @@
# Module imports
from .base import BaseSerializer
from plane.db.models import Dashboard, Widget

# Third party frameworks
from rest_framework import serializers


class DashboardSerializer(BaseSerializer):
    class Meta:
        model = Dashboard
        fields = "__all__"


class WidgetSerializer(BaseSerializer):
    is_visible = serializers.BooleanField(read_only=True)
    widget_filters = serializers.JSONField(read_only=True)

    class Meta:
        model = Widget
        fields = ["id", "key", "is_visible", "widget_filters"]
@@ -53,7 +53,6 @@ def get_entity_model_and_serializer(entity_type):
    }
    return entity_map.get(entity_type, (None, None))


class UserFavoriteSerializer(serializers.ModelSerializer):
    entity_data = serializers.SerializerMethodField()
@@ -2,6 +2,7 @@
from django.utils import timezone
from django.core.validators import URLValidator
from django.core.exceptions import ValidationError
from django.db import IntegrityError

# Third Party imports
from rest_framework import serializers
@@ -33,6 +34,9 @@ from plane.db.models import (
    IssueVote,
    IssueRelation,
    State,
    IssueVersion,
    IssueDescriptionVersion,
    ProjectMember,
)


@@ -107,14 +111,23 @@ class IssueCreateSerializer(BaseSerializer):
        data["label_ids"] = label_ids if label_ids else []
        return data

    def validate(self, data):
    def validate(self, attrs):
        if (
            data.get("start_date", None) is not None
            and data.get("target_date", None) is not None
            and data.get("start_date", None) > data.get("target_date", None)
            attrs.get("start_date", None) is not None
            and attrs.get("target_date", None) is not None
            and attrs.get("start_date", None) > attrs.get("target_date", None)
        ):
            raise serializers.ValidationError("Start date cannot exceed target date")
        return data

        if attrs.get("assignee_ids", []):
            attrs["assignee_ids"] = ProjectMember.objects.filter(
                project_id=self.context["project_id"],
                role__gte=15,
                is_active=True,
                member_id__in=attrs["assignee_ids"],
            ).values_list("member_id", flat=True)

        return attrs

    def create(self, validated_data):
        assignees = validated_data.pop("assignee_ids", None)
@@ -132,47 +145,64 @@ class IssueCreateSerializer(BaseSerializer):
        updated_by_id = issue.updated_by_id

        if assignees is not None and len(assignees):
            IssueAssignee.objects.bulk_create(
                [
                    IssueAssignee(
                        assignee=user,
            try:
                IssueAssignee.objects.bulk_create(
                    [
                        IssueAssignee(
                            assignee_id=assignee_id,
                            issue=issue,
                            project_id=project_id,
                            workspace_id=workspace_id,
                            created_by_id=created_by_id,
                            updated_by_id=updated_by_id,
                        )
                        for assignee_id in assignees
                    ],
                    batch_size=10,
                )
            except IntegrityError:
                pass
        else:
            # Then assign it to default assignee, if it is a valid assignee
            if (
                default_assignee_id is not None
                and ProjectMember.objects.filter(
                    member_id=default_assignee_id,
                    project_id=project_id,
                    role__gte=15,
                    is_active=True,
                ).exists()
            ):
                try:
                    IssueAssignee.objects.create(
                        assignee_id=default_assignee_id,
                        issue=issue,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for user in assignees
                ],
                batch_size=10,
            )
        else:
            # Then assign it to default assignee
            if default_assignee_id is not None:
                IssueAssignee.objects.create(
                    assignee_id=default_assignee_id,
                    issue=issue,
                    project_id=project_id,
                    workspace_id=workspace_id,
                    created_by_id=created_by_id,
                    updated_by_id=updated_by_id,
                )
                except IntegrityError:
                    pass

        if labels is not None and len(labels):
            IssueLabel.objects.bulk_create(
                [
                    IssueLabel(
                        label=label,
                        issue=issue,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for label in labels
                ],
                batch_size=10,
            )
            try:
                IssueLabel.objects.bulk_create(
                    [
                        IssueLabel(
                            label=label,
                            issue=issue,
                            project_id=project_id,
                            workspace_id=workspace_id,
                            created_by_id=created_by_id,
                            updated_by_id=updated_by_id,
                        )
                        for label in labels
                    ],
                    batch_size=10,
                )
            except IntegrityError:
                pass

        return issue
@@ -188,37 +218,45 @@ class IssueCreateSerializer(BaseSerializer):

        if assignees is not None:
            IssueAssignee.objects.filter(issue=instance).delete()
            IssueAssignee.objects.bulk_create(
                [
                    IssueAssignee(
                        assignee=user,
                        issue=instance,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for user in assignees
                ],
                batch_size=10,
            )
            try:
                IssueAssignee.objects.bulk_create(
                    [
                        IssueAssignee(
                            assignee_id=assignee_id,
                            issue=instance,
                            project_id=project_id,
                            workspace_id=workspace_id,
                            created_by_id=created_by_id,
                            updated_by_id=updated_by_id,
                        )
                        for assignee_id in assignees
                    ],
                    batch_size=10,
                    ignore_conflicts=True,
                )
            except IntegrityError:
                pass

        if labels is not None:
            IssueLabel.objects.filter(issue=instance).delete()
            IssueLabel.objects.bulk_create(
                [
                    IssueLabel(
                        label=label,
                        issue=instance,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for label in labels
                ],
                batch_size=10,
            )
            try:
                IssueLabel.objects.bulk_create(
                    [
                        IssueLabel(
                            label=label,
                            issue=instance,
                            project_id=project_id,
                            workspace_id=workspace_id,
                            created_by_id=created_by_id,
                            updated_by_id=updated_by_id,
                        )
                        for label in labels
                    ],
                    batch_size=10,
                    ignore_conflicts=True,
                )
            except IntegrityError:
                pass

        # Timestamp update occurs even when other related models are updated
        instance.updated_at = timezone.now()
@@ -230,6 +268,20 @@ class IssueActivitySerializer(BaseSerializer):
    issue_detail = IssueFlatSerializer(read_only=True, source="issue")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
    source_data = serializers.SerializerMethodField()

    def get_source_data(self, obj):
        if (
            hasattr(obj, "issue")
            and hasattr(obj.issue, "source_data")
            and obj.issue.source_data
        ):
            return {
                "source": obj.issue.source_data[0].source,
                "source_email": obj.issue.source_data[0].source_email,
                "extra": obj.issue.source_data[0].extra,
            }
        return None

    class Meta:
        model = IssueActivity
@@ -281,10 +333,26 @@ class IssueRelationSerializer(BaseSerializer):
    )
    name = serializers.CharField(source="related_issue.name", read_only=True)
    relation_type = serializers.CharField(read_only=True)
    state_id = serializers.UUIDField(source="related_issue.state.id", read_only=True)
    priority = serializers.CharField(source="related_issue.priority", read_only=True)
    assignee_ids = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
        write_only=True,
        required=False,
    )

    class Meta:
        model = IssueRelation
        fields = ["id", "project_id", "sequence_id", "relation_type", "name"]
        fields = [
            "id",
            "project_id",
            "sequence_id",
            "relation_type",
            "name",
            "state_id",
            "priority",
            "assignee_ids",
        ]
        read_only_fields = ["workspace", "project"]
@@ -296,10 +364,26 @@ class RelatedIssueSerializer(BaseSerializer):
    sequence_id = serializers.IntegerField(source="issue.sequence_id", read_only=True)
    name = serializers.CharField(source="issue.name", read_only=True)
    relation_type = serializers.CharField(read_only=True)
    state_id = serializers.UUIDField(source="issue.state.id", read_only=True)
    priority = serializers.CharField(source="issue.priority", read_only=True)
    assignee_ids = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
        write_only=True,
        required=False,
    )

    class Meta:
        model = IssueRelation
        fields = ["id", "project_id", "sequence_id", "relation_type", "name"]
        fields = [
            "id",
            "project_id",
            "sequence_id",
            "relation_type",
            "name",
            "state_id",
            "priority",
            "assignee_ids",
        ]
        read_only_fields = ["workspace", "project"]
@@ -470,6 +554,7 @@ class IssueAttachmentLiteSerializer(DynamicBaseSerializer):
            "asset",
            "attributes",
            # "issue_id",
            "created_by",
            "updated_at",
            "updated_by",
            "asset_url",
@@ -667,3 +752,64 @@ class IssueSubscriberSerializer(BaseSerializer):
        model = IssueSubscriber
        fields = "__all__"
        read_only_fields = ["workspace", "project", "issue"]


class IssueVersionDetailSerializer(BaseSerializer):
    class Meta:
        model = IssueVersion
        fields = [
            "id",
            "workspace",
            "project",
            "issue",
            "parent",
            "state",
            "estimate_point",
            "name",
            "priority",
            "start_date",
            "target_date",
            "assignees",
            "sequence_id",
            "labels",
            "sort_order",
            "completed_at",
            "archived_at",
            "is_draft",
            "external_source",
            "external_id",
            "type",
            "cycle",
            "modules",
            "meta",
            "name",
            "last_saved_at",
            "owned_by",
            "created_at",
            "updated_at",
            "created_by",
            "updated_by",
        ]
        read_only_fields = ["workspace", "project", "issue"]


class IssueDescriptionVersionDetailSerializer(BaseSerializer):
    class Meta:
        model = IssueDescriptionVersion
        fields = [
            "id",
            "workspace",
            "project",
            "issue",
            "description_binary",
            "description_html",
            "description_stripped",
            "description_json",
            "last_saved_at",
            "owned_by",
            "created_at",
            "updated_at",
            "created_by",
            "updated_by",
        ]
        read_only_fields = ["workspace", "project", "issue"]
@@ -54,6 +54,8 @@ class PageSerializer(BaseSerializer):
        labels = validated_data.pop("labels", None)
        project_id = self.context["project_id"]
        owned_by_id = self.context["owned_by_id"]
        description = self.context["description"]
        description_binary = self.context["description_binary"]
        description_html = self.context["description_html"]

        # Get the workspace id from the project
@@ -62,6 +64,8 @@ class PageSerializer(BaseSerializer):
        # Create the page
        page = Page.objects.create(
            **validated_data,
            description=description,
            description_binary=description_binary,
            description_html=description_html,
            owned_by_id=owned_by_id,
            workspace_id=project.workspace_id,
@@ -90,17 +90,7 @@ class ProjectLiteSerializer(BaseSerializer):


class ProjectListSerializer(DynamicBaseSerializer):
    total_issues = serializers.IntegerField(read_only=True)
    archived_issues = serializers.IntegerField(read_only=True)
    archived_sub_issues = serializers.IntegerField(read_only=True)
    draft_issues = serializers.IntegerField(read_only=True)
    draft_sub_issues = serializers.IntegerField(read_only=True)
    sub_issues = serializers.IntegerField(read_only=True)
    is_favorite = serializers.BooleanField(read_only=True)
    total_members = serializers.IntegerField(read_only=True)
    total_cycles = serializers.IntegerField(read_only=True)
    total_modules = serializers.IntegerField(read_only=True)
    is_member = serializers.BooleanField(read_only=True)
    sort_order = serializers.FloatField(read_only=True)
    member_role = serializers.IntegerField(read_only=True)
    anchor = serializers.CharField(read_only=True)
@@ -113,14 +103,9 @@ class ProjectListSerializer(DynamicBaseSerializer):
        if project_members is not None:
            # Filter members by the project ID
            return [
                {
                    "id": member.id,
                    "member_id": member.member_id,
                    "member__display_name": member.member.display_name,
                    "member__avatar": member.member.avatar,
                    "member__avatar_url": member.member.avatar_url,
                }
                member.member_id
                for member in project_members
                if member.is_active and not member.member.is_bot
            ]
        return []

@@ -134,10 +119,6 @@ class ProjectDetailSerializer(BaseSerializer):
    default_assignee = UserLiteSerializer(read_only=True)
    project_lead = UserLiteSerializer(read_only=True)
    is_favorite = serializers.BooleanField(read_only=True)
    total_members = serializers.IntegerField(read_only=True)
    total_cycles = serializers.IntegerField(read_only=True)
    total_modules = serializers.IntegerField(read_only=True)
    is_member = serializers.BooleanField(read_only=True)
    sort_order = serializers.FloatField(read_only=True)
    member_role = serializers.IntegerField(read_only=True)
    anchor = serializers.CharField(read_only=True)
@@ -116,7 +116,7 @@ class WebhookSerializer(DynamicBaseSerializer):
    class Meta:
        model = Webhook
        fields = "__all__"
-        read_only_fields = ["workspace", "secret_key"]
+        read_only_fields = ["workspace", "secret_key", "deleted_at"]


class WebhookLogSerializer(DynamicBaseSerializer):

@@ -1,25 +1,40 @@
# Third party imports
from rest_framework import serializers
from rest_framework import status
from rest_framework.response import Response

# Module imports
from .base import BaseSerializer, DynamicBaseSerializer
from .user import UserLiteSerializer, UserAdminLiteSerializer


from plane.db.models import (
    Workspace,
    WorkspaceMember,
    WorkspaceMemberInvite,
    WorkspaceTheme,
    WorkspaceUserProperties,
    WorkspaceUserLink,
    UserRecentVisit,
    Issue,
    Page,
    Project,
    ProjectMember,
    WorkspaceHomePreference,
    Sticky,
    WorkspaceUserPreference,
)
from plane.utils.constants import RESTRICTED_WORKSPACE_SLUGS

# Django imports
from django.core.validators import URLValidator
from django.core.exceptions import ValidationError


class WorkSpaceSerializer(DynamicBaseSerializer):
    owner = UserLiteSerializer(read_only=True)
    total_members = serializers.IntegerField(read_only=True)
    total_issues = serializers.IntegerField(read_only=True)
    logo_url = serializers.CharField(read_only=True)
    role = serializers.IntegerField(read_only=True)

    def validate_slug(self, value):
        # Check if the slug is restricted

@@ -44,7 +59,7 @@ class WorkSpaceSerializer(DynamicBaseSerializer):
class WorkspaceLiteSerializer(BaseSerializer):
    class Meta:
        model = Workspace
-        fields = ["name", "slug", "id"]
+        fields = ["name", "slug", "id", "logo_url"]
        read_only_fields = fields


@@ -75,9 +90,11 @@ class WorkspaceMemberAdminSerializer(DynamicBaseSerializer):


class WorkSpaceMemberInviteSerializer(BaseSerializer):
-    workspace = WorkSpaceSerializer(read_only=True)
    total_members = serializers.IntegerField(read_only=True)
    created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
+    workspace = WorkspaceLiteSerializer(read_only=True)
    invite_link = serializers.SerializerMethodField()

    def get_invite_link(self, obj):
        return f"/workspace-invitations/?invitation_id={obj.id}&email={obj.email}&slug={obj.workspace.slug}"

    class Meta:
        model = WorkspaceMemberInvite
@@ -91,6 +108,7 @@ class WorkSpaceMemberInviteSerializer(BaseSerializer):
            "responded_at",
            "created_at",
            "updated_at",
            "invite_link",
        ]


@@ -106,3 +124,183 @@ class WorkspaceUserPropertiesSerializer(BaseSerializer):
        model = WorkspaceUserProperties
        fields = "__all__"
        read_only_fields = ["workspace", "user"]


class WorkspaceUserLinkSerializer(BaseSerializer):
    class Meta:
        model = WorkspaceUserLink
        fields = "__all__"
        read_only_fields = ["workspace", "owner"]

    def to_internal_value(self, data):
        url = data.get("url", "")
        if url and not url.startswith(("http://", "https://")):
            data["url"] = "http://" + url

        return super().to_internal_value(data)

    def validate_url(self, value):
        url_validator = URLValidator()
        try:
            url_validator(value)
        except ValidationError:
            raise serializers.ValidationError({"error": "Invalid URL format."})

        return value
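
    # For example (illustrative payload, not from the diff): {"url": "plane.so"}
    # is rewritten by to_internal_value() to {"url": "http://plane.so"} before
    # field validation runs, so bare domains pass URLValidator.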

    def create(self, validated_data):
        # Filter WorkspaceUserLink by the given URL to check whether the link already exists.
        url = validated_data.get("url")

        workspace_user_link = WorkspaceUserLink.objects.filter(
            url=url,
            workspace_id=validated_data.get("workspace_id"),
            owner_id=validated_data.get("owner_id"),
        )

        if workspace_user_link.exists():
            raise serializers.ValidationError(
                {"error": "URL already exists for this workspace and owner"}
            )

        return super().create(validated_data)

    def update(self, instance, validated_data):
        # Filter WorkspaceUserLink by the given URL to check whether the link already exists.
        url = validated_data.get("url")

        workspace_user_link = WorkspaceUserLink.objects.filter(
            url=url,
            workspace_id=instance.workspace_id,
            owner=instance.owner,
        )

        if workspace_user_link.exclude(pk=instance.id).exists():
            raise serializers.ValidationError(
                {"error": "URL already exists for this workspace and owner"}
            )

        return super().update(instance, validated_data)


class IssueRecentVisitSerializer(serializers.ModelSerializer):
    project_identifier = serializers.SerializerMethodField()

    class Meta:
        model = Issue
        fields = [
            "id",
            "name",
            "state",
            "priority",
            "assignees",
            "type",
            "sequence_id",
            "project_id",
            "project_identifier",
        ]

    def get_project_identifier(self, obj):
        project = obj.project
        return project.identifier if project else None


class ProjectRecentVisitSerializer(serializers.ModelSerializer):
    project_members = serializers.SerializerMethodField()

    class Meta:
        model = Project
        fields = ["id", "name", "logo_props", "project_members", "identifier"]

    def get_project_members(self, obj):
        members = ProjectMember.objects.filter(
            project_id=obj.id, member__is_bot=False, is_active=True
        ).values_list("member", flat=True)
        return members


class PageRecentVisitSerializer(serializers.ModelSerializer):
    project_id = serializers.SerializerMethodField()
    project_identifier = serializers.SerializerMethodField()

    class Meta:
        model = Page
        fields = [
            "id",
            "name",
            "logo_props",
            "project_id",
            "owned_by",
            "project_identifier",
        ]

    def get_project_id(self, obj):
        return (
            obj.project_id
            if hasattr(obj, "project_id")
            else obj.projects.values_list("id", flat=True).first()
        )

    def get_project_identifier(self, obj):
        project = obj.projects.first()
        return project.identifier if project else None


def get_entity_model_and_serializer(entity_type):
    entity_map = {
        "issue": (Issue, IssueRecentVisitSerializer),
        "page": (Page, PageRecentVisitSerializer),
        "project": (Project, ProjectRecentVisitSerializer),
    }
    return entity_map.get(entity_type, (None, None))
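
# Usage sketch: get_entity_model_and_serializer("page") returns
# (Page, PageRecentVisitSerializer); an unrecognized entity type falls back
# to (None, None), which callers must check before querying.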


class WorkspaceRecentVisitSerializer(BaseSerializer):
    entity_data = serializers.SerializerMethodField()

    class Meta:
        model = UserRecentVisit
        fields = ["id", "entity_name", "entity_identifier", "entity_data", "visited_at"]
        read_only_fields = ["workspace", "owner", "created_by", "updated_by"]

    def get_entity_data(self, obj):
        entity_name = obj.entity_name
        entity_identifier = obj.entity_identifier

        entity_model, entity_serializer = get_entity_model_and_serializer(entity_name)

        if entity_model and entity_serializer:
            try:
                entity = entity_model.objects.get(pk=entity_identifier)
                return entity_serializer(entity).data
            except entity_model.DoesNotExist:
                return None
        return None


class WorkspaceHomePreferenceSerializer(BaseSerializer):
    class Meta:
        model = WorkspaceHomePreference
        fields = ["key", "is_enabled", "sort_order"]
        read_only_fields = ["workspace", "created_by", "updated_by"]


class StickySerializer(BaseSerializer):
    class Meta:
        model = Sticky
        fields = "__all__"
        read_only_fields = ["workspace", "owner"]
        extra_kwargs = {"name": {"required": False}}


class WorkspaceUserPreferenceSerializer(BaseSerializer):
    class Meta:
        model = WorkspaceUserPreference
        fields = ["key", "is_pinned", "sort_order"]
        read_only_fields = ["workspace", "created_by", "updated_by"]

@@ -2,7 +2,6 @@ from .analytic import urlpatterns as analytic_urls
from .api import urlpatterns as api_urls
from .asset import urlpatterns as asset_urls
from .cycle import urlpatterns as cycle_urls
-from .dashboard import urlpatterns as dashboard_urls
from .estimate import urlpatterns as estimate_urls
from .external import urlpatterns as external_urls
from .intake import urlpatterns as intake_urls
@@ -23,7 +22,6 @@ urlpatterns = [
    *analytic_urls,
    *asset_urls,
    *cycle_urls,
-    *dashboard_urls,
    *estimate_urls,
    *external_urls,
    *intake_urls,

@@ -7,6 +7,7 @@ from plane.app.views import (
    SavedAnalyticEndpoint,
    ExportAnalyticsEndpoint,
    DefaultAnalyticsEndpoint,
    ProjectStatsEndpoint,
)


@@ -43,4 +44,9 @@ urlpatterns = [
        DefaultAnalyticsEndpoint.as_view(),
        name="default-analytics",
    ),
    path(
        "workspaces/<str:slug>/project-stats/",
        ProjectStatsEndpoint.as_view(),
        name="project-analytics",
    ),
]

@@ -1,23 +0,0 @@
from django.urls import path


from plane.app.views import DashboardEndpoint, WidgetsEndpoint


urlpatterns = [
    path(
        "workspaces/<str:slug>/dashboard/",
        DashboardEndpoint.as_view(),
        name="dashboard",
    ),
    path(
        "workspaces/<str:slug>/dashboard/<uuid:dashboard_id>/",
        DashboardEndpoint.as_view(),
        name="dashboard",
    ),
    path(
        "dashboard/<uuid:dashboard_id>/widgets/<uuid:widget_id>/",
        WidgetsEndpoint.as_view(),
        name="widgets",
    ),
]
@@ -24,6 +24,10 @@ from plane.app.views import (
    IssueDetailEndpoint,
    IssueAttachmentV2Endpoint,
    IssueBulkUpdateDateEndpoint,
    IssueVersionEndpoint,
    IssueDescriptionVersionEndpoint,
    IssueMetaEndpoint,
    IssueDetailIdentifierEndpoint,
)

urlpatterns = [
@@ -256,4 +260,34 @@ urlpatterns = [
        IssueBulkUpdateDateEndpoint.as_view(),
        name="project-issue-dates",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/versions/",
        IssueVersionEndpoint.as_view(),
        name="page-versions",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/versions/<uuid:pk>/",
        IssueVersionEndpoint.as_view(),
        name="page-versions",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/description-versions/",
        IssueDescriptionVersionEndpoint.as_view(),
        name="page-versions",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/description-versions/<uuid:pk>/",
        IssueDescriptionVersionEndpoint.as_view(),
        name="page-versions",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/meta/",
        IssueMetaEndpoint.as_view(),
        name="issue-meta",
    ),
    path(
        "workspaces/<str:slug>/work-items/<str:project_identifier>-<str:issue_identifier>/",
        IssueDetailIdentifierEndpoint.as_view(),
        name="issue-detail-identifier",
    ),
]

@@ -8,6 +8,7 @@ from plane.app.views import (
|
||||
SubPagesEndpoint,
|
||||
PagesDescriptionViewSet,
|
||||
PageVersionEndpoint,
|
||||
PageDuplicateEndpoint,
|
||||
)
|
||||
|
||||
|
||||
@@ -78,4 +79,9 @@ urlpatterns = [
|
||||
PageVersionEndpoint.as_view(),
|
||||
name="page-versions",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/duplicate/",
|
||||
PageDuplicateEndpoint.as_view(),
|
||||
name="page-duplicate",
|
||||
),
|
||||
]
|
||||
|
||||
@@ -23,6 +23,11 @@ urlpatterns = [
        ProjectViewSet.as_view({"get": "list", "post": "create"}),
        name="project",
    ),
    path(
        "workspaces/<str:slug>/projects/details/",
        ProjectViewSet.as_view({"get": "list_detail"}),
        name="project",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:pk>/",
        ProjectViewSet.as_view(

@@ -16,7 +16,7 @@ urlpatterns = [
        name="project-issue-search",
    ),
    path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/entity-search/",
+        "workspaces/<str:slug>/entity-search/",
        SearchEndpoint.as_view(),
        name="entity-search",
    ),

@@ -27,6 +27,11 @@ from plane.app.views import (
    WorkspaceFavoriteEndpoint,
    WorkspaceFavoriteGroupEndpoint,
    WorkspaceDraftIssueViewSet,
    QuickLinkViewSet,
    UserRecentVisitViewSet,
    WorkspaceHomePreferenceViewSet,
    WorkspaceStickyViewSet,
    WorkspaceUserPreferenceViewSet,
)


@@ -213,4 +218,56 @@ urlpatterns = [
        WorkspaceDraftIssueViewSet.as_view({"post": "create_draft_to_issue"}),
        name="workspace-drafts-issues",
    ),
    # quick link
    path(
        "workspaces/<str:slug>/quick-links/",
        QuickLinkViewSet.as_view({"get": "list", "post": "create"}),
        name="workspace-quick-links",
    ),
    path(
        "workspaces/<str:slug>/quick-links/<uuid:pk>/",
        QuickLinkViewSet.as_view(
            {"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
        ),
        name="workspace-quick-links",
    ),
    # Widgets
    path(
        "workspaces/<str:slug>/home-preferences/",
        WorkspaceHomePreferenceViewSet.as_view(),
        name="workspace-home-preference",
    ),
    path(
        "workspaces/<str:slug>/home-preferences/<str:key>/",
        WorkspaceHomePreferenceViewSet.as_view(),
        name="workspace-home-preference",
    ),
    path(
        "workspaces/<str:slug>/recent-visits/",
        UserRecentVisitViewSet.as_view({"get": "list"}),
        name="workspace-recent-visits",
    ),
    path(
        "workspaces/<str:slug>/stickies/",
        WorkspaceStickyViewSet.as_view({"get": "list", "post": "create"}),
        name="workspace-sticky",
    ),
    path(
        "workspaces/<str:slug>/stickies/<uuid:pk>/",
        WorkspaceStickyViewSet.as_view(
            {"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
        ),
        name="workspace-sticky",
    ),
    # User Preference
    path(
        "workspaces/<str:slug>/sidebar-preferences/",
        WorkspaceUserPreferenceViewSet.as_view(),
        name="workspace-user-preference",
    ),
    path(
        "workspaces/<str:slug>/sidebar-preferences/<str:key>/",
        WorkspaceUserPreferenceViewSet.as_view(),
        name="workspace-user-preference",
    ),
]

@@ -41,10 +41,14 @@ from .workspace.base import (

from .workspace.draft import WorkspaceDraftIssueViewSet

from .workspace.home import WorkspaceHomePreferenceViewSet

from .workspace.favorite import (
    WorkspaceFavoriteEndpoint,
    WorkspaceFavoriteGroupEndpoint,
)
from .workspace.recent_visit import UserRecentVisitViewSet
from .workspace.user_preference import WorkspaceUserPreferenceViewSet

from .workspace.member import (
    WorkSpaceMemberViewSet,
@@ -72,6 +76,8 @@ from .workspace.user import (
from .workspace.estimate import WorkspaceEstimatesEndpoint
from .workspace.module import WorkspaceModulesEndpoint
from .workspace.cycle import WorkspaceCyclesEndpoint
from .workspace.quick_link import QuickLinkViewSet
from .workspace.sticky import WorkspaceStickyViewSet

from .state.base import StateViewSet
from .view.base import (
@@ -110,6 +116,8 @@ from .issue.base import (
    IssuePaginatedViewSet,
    IssueDetailEndpoint,
    IssueBulkUpdateDateEndpoint,
    IssueMetaEndpoint,
    IssueDetailIdentifierEndpoint,
)

from .issue.activity import IssueActivityEndpoint
@@ -136,6 +144,8 @@ from .issue.sub_issue import SubIssuesEndpoint

from .issue.subscriber import IssueSubscriberViewSet

from .issue.version import IssueVersionEndpoint, IssueDescriptionVersionEndpoint

from .module.base import (
    ModuleViewSet,
    ModuleLinkViewSet,
@@ -155,6 +165,7 @@ from .page.base import (
    PageLogEndpoint,
    SubPagesEndpoint,
    PagesDescriptionViewSet,
    PageDuplicateEndpoint,
)
from .page.version import PageVersionEndpoint

@@ -181,6 +192,7 @@ from .analytic.base import (
    SavedAnalyticEndpoint,
    ExportAnalyticsEndpoint,
    DefaultAnalyticsEndpoint,
    ProjectStatsEndpoint,
)

from .notification.base import (
@@ -198,8 +210,6 @@ from .webhook.base import (
    WebhookSecretRegenerateEndpoint,
)

-from .dashboard.base import DashboardEndpoint, WidgetsEndpoint

from .error_404 import custom_404_view

from .notification.base import MarkAllReadNotificationViewSet

@@ -3,7 +3,7 @@ from django.db.models import Count, F, Sum, Q
from django.db.models.functions import ExtractMonth
from django.utils import timezone
from django.db.models.functions import Concat
-from django.db.models import Case, When, Value
+from django.db.models import Case, When, Value, OuterRef, Func
from django.db import models

# Third party imports
@@ -15,7 +15,16 @@ from plane.app.permissions import WorkSpaceAdminPermission
from plane.app.serializers import AnalyticViewSerializer
from plane.app.views.base import BaseAPIView, BaseViewSet
from plane.bgtasks.analytic_plot_export import analytic_export_task
-from plane.db.models import AnalyticView, Issue, Workspace
+from plane.db.models import (
+    AnalyticView,
+    Issue,
+    Workspace,
+    Project,
+    ProjectMember,
+    Cycle,
+    Module,
+)

from plane.utils.analytics_plot import build_graph_plot
from plane.utils.issue_filters import issue_filters
from plane.app.permissions import allow_permission, ROLE
@@ -441,3 +450,74 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
            },
            status=status.HTTP_200_OK,
        )


class ProjectStatsEndpoint(BaseAPIView):
    @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST], level="WORKSPACE")
    def get(self, request, slug):
        fields = request.GET.get("fields", "").split(",")
        project_ids = request.GET.get("project_ids", "")

        valid_fields = {
            "total_issues",
            "completed_issues",
            "total_members",
            "total_cycles",
            "total_modules",
        }
        requested_fields = set(filter(None, fields)) & valid_fields

        if not requested_fields:
            requested_fields = valid_fields

        projects = Project.objects.filter(workspace__slug=slug)
        if project_ids:
            projects = projects.filter(id__in=project_ids.split(","))

        annotations = {}
        if "total_issues" in requested_fields:
            annotations["total_issues"] = (
                Issue.issue_objects.filter(project_id=OuterRef("pk"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )

        if "completed_issues" in requested_fields:
            annotations["completed_issues"] = (
                Issue.issue_objects.filter(
                    project_id=OuterRef("pk"), state__group="completed"
                )
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )

        if "total_cycles" in requested_fields:
            annotations["total_cycles"] = (
                Cycle.objects.filter(project_id=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )

        if "total_modules" in requested_fields:
            annotations["total_modules"] = (
                Module.objects.filter(project_id=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )

        if "total_members" in requested_fields:
            annotations["total_members"] = (
                ProjectMember.objects.filter(
                    project_id=OuterRef("id"), member__is_bot=False, is_active=True
                )
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )

        projects = projects.annotate(**annotations).values("id", *requested_fields)
        return Response(projects, status=status.HTTP_200_OK)
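
# Note on the annotation pattern above: each entry is a correlated subquery.
# For a given project row,
#     Issue.issue_objects.filter(project_id=OuterRef("pk"))
#         .order_by()
#         .annotate(count=Func(F("id"), function="Count"))
#         .values("count")
# compiles to roughly SELECT COUNT(id) FROM issues WHERE project_id = <pk>;
# the bare .order_by() clears any default ordering so the subquery collapses
# to a single COUNT row per project.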


@@ -5,6 +5,7 @@ import uuid
from django.conf import settings
from django.http import HttpResponseRedirect
from django.utils import timezone
from django.db import IntegrityError

# Third party imports
from rest_framework import status
@@ -679,15 +680,30 @@ class ProjectBulkAssetEndpoint(BaseAPIView):
            [self.save_project_cover(asset, project_id) for asset in assets]

        if asset.entity_type == FileAsset.EntityTypeContext.ISSUE_DESCRIPTION:
-            assets.update(issue_id=entity_id)
+            # In some cases the bulk API is called after the issue has been
+            # deleted, which raises an integrity error
+            try:
+                assets.update(issue_id=entity_id)
+            except IntegrityError:
+                pass

        if asset.entity_type == FileAsset.EntityTypeContext.COMMENT_DESCRIPTION:
-            assets.update(comment_id=entity_id)
+            # In some cases the bulk API is called after the comment has been
+            # deleted, which raises an integrity error
+            try:
+                assets.update(comment_id=entity_id)
+            except IntegrityError:
+                pass

        if asset.entity_type == FileAsset.EntityTypeContext.PAGE_DESCRIPTION:
            assets.update(page_id=entity_id)

        if asset.entity_type == FileAsset.EntityTypeContext.DRAFT_ISSUE_DESCRIPTION:
-            assets.update(draft_issue_id=entity_id)
+            # In some cases the bulk API is called after the draft issue has
+            # been deleted, which raises an integrity error
+            try:
+                assets.update(draft_issue_id=entity_id)
+            except IntegrityError:
+                pass

        return Response(status=status.HTTP_204_NO_CONTENT)

@@ -47,18 +47,14 @@ from plane.db.models import (
    User,
    Project,
    ProjectMember,
    UserRecentVisit,
)
from plane.utils.analytics_plot import burndown_plot
from plane.bgtasks.recent_visited_task import recent_visited_task

# Module imports
from plane.utils.host import base_host
from .. import BaseAPIView, BaseViewSet
from plane.bgtasks.webhook_task import model_activity
-from plane.utils.timezone_converter import (
-    convert_utc_to_project_timezone,
-    convert_to_utc,
-    user_timezone_converter,
-)
+from plane.utils.timezone_converter import convert_to_utc, user_timezone_converter


class CycleViewSet(BaseViewSet):
@@ -136,6 +132,18 @@ class CycleViewSet(BaseViewSet):
                    ),
                )
            )
            .annotate(
                cancelled_issues=Count(
                    "issue_cycle__issue__id",
                    distinct=True,
                    filter=Q(
                        issue_cycle__issue__state__group__in=["cancelled"],
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                        issue_cycle__deleted_at__isnull=True,
                    ),
                )
            )
            .annotate(
                status=Case(
                    When(
@@ -143,10 +151,7 @@ class CycleViewSet(BaseViewSet):
                        & Q(end_date__gte=current_time_in_utc),
                        then=Value("CURRENT"),
                    ),
-                    When(
-                        start_date__gt=current_time_in_utc,
-                        then=Value("UPCOMING"),
-                    ),
+                    When(start_date__gt=current_time_in_utc, then=Value("UPCOMING")),
                    When(end_date__lt=current_time_in_utc, then=Value("COMPLETED")),
                    When(
                        Q(start_date__isnull=True) & Q(end_date__isnull=True),
@@ -221,6 +226,7 @@ class CycleViewSet(BaseViewSet):
                "is_favorite",
                "total_issues",
                "completed_issues",
                "cancelled_issues",
                "assignee_ids",
                "status",
                "version",
@@ -252,6 +258,7 @@ class CycleViewSet(BaseViewSet):
                # meta fields
                "is_favorite",
                "total_issues",
                "cancelled_issues",
                "completed_issues",
                "assignee_ids",
                "status",
@@ -259,7 +266,9 @@ class CycleViewSet(BaseViewSet):
                "created_by",
            )
            datetime_fields = ["start_date", "end_date"]
-            data = user_timezone_converter(data, datetime_fields, request.user.user_timezone)
+            data = user_timezone_converter(
+                data, datetime_fields, project_timezone
+            )
            return Response(data, status=status.HTTP_200_OK)

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER])
@@ -271,7 +280,9 @@ class CycleViewSet(BaseViewSet):
            request.data.get("start_date", None) is not None
            and request.data.get("end_date", None) is not None
        ):
-            serializer = CycleWriteSerializer(data=request.data)
+            serializer = CycleWriteSerializer(
+                data=request.data, context={"project_id": project_id}
+            )
            if serializer.is_valid():
                serializer.save(project_id=project_id, owned_by=request.user)
                cycle = (
@@ -306,6 +317,15 @@ class CycleViewSet(BaseViewSet):
                    .first()
                )

                # Fetch the project timezone
                project = Project.objects.get(id=self.kwargs.get("project_id"))
                project_timezone = project.timezone

                datetime_fields = ["start_date", "end_date"]
                cycle = user_timezone_converter(
                    cycle, datetime_fields, project_timezone
                )

                # Send the model activity
                model_activity.delay(
                    model_name="cycle",
@@ -314,7 +334,7 @@ class CycleViewSet(BaseViewSet):
                    current_instance=None,
                    actor_id=request.user.id,
                    slug=slug,
-                    origin=request.META.get("HTTP_ORIGIN"),
+                    origin=base_host(request=request, is_app=True),
                )
                return Response(cycle, status=status.HTTP_201_CREATED)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -358,7 +378,9 @@ class CycleViewSet(BaseViewSet):
                status=status.HTTP_400_BAD_REQUEST,
            )

-        serializer = CycleWriteSerializer(cycle, data=request.data, partial=True)
+        serializer = CycleWriteSerializer(
+            cycle, data=request.data, partial=True, context={"project_id": project_id}
+        )
        if serializer.is_valid():
            serializer.save()
            cycle = queryset.values(
@@ -388,6 +410,15 @@ class CycleViewSet(BaseViewSet):
                "created_by",
            ).first()

            # Fetch the project timezone
            project = Project.objects.get(id=self.kwargs.get("project_id"))
            project_timezone = project.timezone

            datetime_fields = ["start_date", "end_date"]
            cycle = user_timezone_converter(
                cycle, datetime_fields, project_timezone
            )

            # Send the model activity
            model_activity.delay(
                model_name="cycle",
@@ -396,7 +427,7 @@ class CycleViewSet(BaseViewSet):
                current_instance=current_instance,
                actor_id=request.user.id,
                slug=slug,
-                origin=request.META.get("HTTP_ORIGIN"),
+                origin=base_host(request=request, is_app=True),
            )

            return Response(cycle, status=status.HTTP_200_OK)
@@ -456,8 +487,11 @@ class CycleViewSet(BaseViewSet):
        )

        queryset = queryset.first()
        # Fetch the project timezone
        project = Project.objects.get(id=self.kwargs.get("project_id"))
        project_timezone = project.timezone
        datetime_fields = ["start_date", "end_date"]
-        data = user_timezone_converter(data, datetime_fields, request.user.user_timezone)
+        data = user_timezone_converter(data, datetime_fields, project_timezone)

        recent_visited_task.delay(
            slug=slug,
@@ -506,7 +540,7 @@ class CycleViewSet(BaseViewSet):
            current_instance=None,
            epoch=int(timezone.now().timestamp()),
            notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
        )
        # TODO: Soft deleting the cycle breaks the one-to-one relationship with cycle issue
        cycle.delete()
@@ -518,6 +552,13 @@ class CycleViewSet(BaseViewSet):
            entity_identifier=pk,
            project_id=project_id,
        ).delete()
        # Delete the cycle from recent visits
        UserRecentVisit.objects.filter(
            project_id=project_id,
            workspace__slug=slug,
            entity_identifier=pk,
            entity_name="cycle",
        ).delete(soft=False)
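        # Assumption: plane's base models soft-delete by default (setting a
        # deleted_at timestamp), so delete(soft=False) here presumably removes
        # the recent-visit rows permanently instead of just flagging them.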
        return Response(status=status.HTTP_204_NO_CONTENT)


@@ -533,8 +574,17 @@ class CycleDateCheckEndpoint(BaseAPIView):
                status=status.HTTP_400_BAD_REQUEST,
            )

-        start_date = convert_to_utc(str(start_date), project_id, is_start_date=True)
-        end_date = convert_to_utc(str(end_date), project_id)
+        is_start_date_end_date_equal = (
+            True if str(start_date) == str(end_date) else False
+        )
+        start_date = convert_to_utc(
+            date=str(start_date), project_id=project_id, is_start_date=True
+        )
+        end_date = convert_to_utc(
+            date=str(end_date),
+            project_id=project_id,
+            is_start_date_end_date_equal=is_start_date_end_date_equal,
+        )
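
        # The is_start_date_end_date_equal flag appears to let convert_to_utc()
        # treat a single-day cycle (start == end) specially, presumably widening
        # the end bound to the end of that day in the project's timezone instead
        # of producing an empty interval.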

        # Check if any cycle intersects in the given interval
        cycles = Cycle.objects.filter(
@@ -1029,7 +1079,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
            ),
            epoch=int(timezone.now().timestamp()),
            notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
        )

        return Response({"message": "Success"}, status=status.HTTP_200_OK)

@@ -27,7 +27,7 @@ from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPaginator
from plane.app.permissions import allow_permission, ROLE

from plane.utils.host import base_host

class CycleIssueViewSet(BaseViewSet):
    serializer_class = CycleIssueSerializer
@@ -291,7 +291,7 @@ class CycleIssueViewSet(BaseViewSet):
            ),
            epoch=int(timezone.now().timestamp()),
            notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
        )
        return Response({"message": "success"}, status=status.HTTP_201_CREATED)

@@ -317,7 +317,7 @@ class CycleIssueViewSet(BaseViewSet):
            current_instance=None,
            epoch=int(timezone.now().timestamp()),
            notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
        )
        cycle_issue.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)

@@ -1,806 +0,0 @@
# Django imports
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models import (
    Case,
    CharField,
    Count,
    Exists,
    F,
    Func,
    IntegerField,
    JSONField,
    OuterRef,
    Prefetch,
    Q,
    Subquery,
    UUIDField,
    Value,
    When,
)
from django.db.models.functions import Coalesce
from django.utils import timezone
from rest_framework import status

# Third Party imports
from rest_framework.response import Response

from plane.app.serializers import (
    DashboardSerializer,
    IssueActivitySerializer,
    IssueSerializer,
    WidgetSerializer,
)
from plane.db.models import (
    Dashboard,
    DashboardWidget,
    Issue,
    IssueActivity,
    FileAsset,
    IssueLink,
    IssueRelation,
    Project,
    Widget,
    WorkspaceMember,
    CycleIssue,
)
from plane.utils.issue_filters import issue_filters

# Module imports
from .. import BaseAPIView


def dashboard_overview_stats(self, request, slug):
    assigned_issues = (
        Issue.issue_objects.filter(
            project__project_projectmember__is_active=True,
            project__project_projectmember__member=request.user,
            workspace__slug=slug,
            assignees__in=[request.user],
        )
        .filter(
            Q(
                project__project_projectmember__role=5,
                project__guest_view_all_features=True,
            )
            | Q(
                project__project_projectmember__role=5,
                project__guest_view_all_features=False,
                created_by=self.request.user,
            )
            |
            # For other roles (role > 5), show all issues
            Q(project__project_projectmember__role__gt=5),
            project__project_projectmember__member=self.request.user,
            project__project_projectmember__is_active=True,
        )
        .count()
    )

    pending_issues_count = (
        Issue.issue_objects.filter(
            ~Q(state__group__in=["completed", "cancelled"]),
            target_date__lt=timezone.now().date(),
            project__project_projectmember__is_active=True,
            project__project_projectmember__member=request.user,
            workspace__slug=slug,
            assignees__in=[request.user],
        )
        .filter(
            Q(
                project__project_projectmember__role=5,
                project__guest_view_all_features=True,
            )
            | Q(
                project__project_projectmember__role=5,
                project__guest_view_all_features=False,
                created_by=self.request.user,
            )
            |
            # For other roles (role > 5), show all issues
            Q(project__project_projectmember__role__gt=5),
            project__project_projectmember__member=self.request.user,
            project__project_projectmember__is_active=True,
        )
        .count()
    )

    created_issues_count = (
        Issue.issue_objects.filter(
            workspace__slug=slug,
            project__project_projectmember__is_active=True,
            project__project_projectmember__member=request.user,
            created_by_id=request.user.id,
        )
        .filter(
            Q(
                project__project_projectmember__role=5,
                project__guest_view_all_features=True,
            )
            | Q(
                project__project_projectmember__role=5,
                project__guest_view_all_features=False,
                created_by=self.request.user,
            )
            |
            # For other roles (role > 5), show all issues
            Q(project__project_projectmember__role__gt=5),
            project__project_projectmember__member=self.request.user,
            project__project_projectmember__is_active=True,
        )
        .count()
    )

    completed_issues_count = (
        Issue.issue_objects.filter(
            workspace__slug=slug,
            project__project_projectmember__is_active=True,
            project__project_projectmember__member=request.user,
            assignees__in=[request.user],
            state__group="completed",
        )
        .filter(
            Q(
                project__project_projectmember__role=5,
                project__guest_view_all_features=True,
            )
            | Q(
                project__project_projectmember__role=5,
                project__guest_view_all_features=False,
                created_by=self.request.user,
            )
            |
            # For other roles (role > 5), show all issues
            Q(project__project_projectmember__role__gt=5),
            project__project_projectmember__member=self.request.user,
            project__project_projectmember__is_active=True,
        )
        .count()
    )

    return Response(
        {
            "assigned_issues_count": assigned_issues,
            "pending_issues_count": pending_issues_count,
            "completed_issues_count": completed_issues_count,
            "created_issues_count": created_issues_count,
        },
        status=status.HTTP_200_OK,
    )
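

# Reading the guest handling above: members with project role 5 (guest) only
# count issues they created unless the project enables guest_view_all_features;
# every role above 5 counts all matching issues.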


def dashboard_assigned_issues(self, request, slug):
    filters = issue_filters(request.query_params, "GET")
    issue_type = request.GET.get("issue_type", None)

    # get all the assigned issues
    assigned_issues = (
        Issue.issue_objects.filter(
            workspace__slug=slug,
            project__project_projectmember__member=request.user,
            project__project_projectmember__is_active=True,
            assignees__in=[request.user],
        )
        .filter(**filters)
        .select_related("workspace", "project", "state", "parent")
        .prefetch_related("assignees", "labels", "issue_module__module")
        .prefetch_related(
            Prefetch(
                "issue_relation",
                queryset=IssueRelation.objects.select_related(
                    "related_issue"
                ).select_related("issue"),
            )
        )
        .annotate(
            cycle_id=Subquery(
                CycleIssue.objects.filter(
                    issue=OuterRef("id"), deleted_at__isnull=True
                ).values("cycle_id")[:1]
            )
        )
        .annotate(
            link_count=IssueLink.objects.filter(issue=OuterRef("id"))
            .order_by()
            .annotate(count=Func(F("id"), function="Count"))
            .values("count")
        )
        .annotate(
            attachment_count=FileAsset.objects.filter(
                issue_id=OuterRef("id"),
                entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
            )
            .order_by()
            .annotate(count=Func(F("id"), function="Count"))
            .values("count")
        )
        .annotate(
            sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
            .order_by()
            .annotate(count=Func(F("id"), function="Count"))
            .values("count")
        )
        .annotate(
            label_ids=Coalesce(
                ArrayAgg(
                    "labels__id",
                    distinct=True,
                    filter=Q(
                        ~Q(labels__id__isnull=True)
                        & Q(label_issue__deleted_at__isnull=True)
                    ),
                ),
                Value([], output_field=ArrayField(UUIDField())),
            ),
            assignee_ids=Coalesce(
                ArrayAgg(
                    "assignees__id",
                    distinct=True,
                    filter=Q(
                        ~Q(assignees__id__isnull=True)
                        & Q(assignees__member_project__is_active=True)
                        & Q(issue_assignee__deleted_at__isnull=True)
                    ),
                ),
                Value([], output_field=ArrayField(UUIDField())),
            ),
            module_ids=Coalesce(
                ArrayAgg(
                    "issue_module__module_id",
                    distinct=True,
                    filter=Q(
                        ~Q(issue_module__module_id__isnull=True)
                        & Q(issue_module__module__archived_at__isnull=True)
                        & Q(issue_module__deleted_at__isnull=True)
                    ),
                ),
                Value([], output_field=ArrayField(UUIDField())),
            ),
        )
    )

    if WorkspaceMember.objects.filter(
        workspace__slug=slug, member=request.user, role=5, is_active=True
    ).exists():
        assigned_issues = assigned_issues.filter(created_by=request.user)

    # Priority Ordering
    priority_order = ["urgent", "high", "medium", "low", "none"]
    assigned_issues = assigned_issues.annotate(
        priority_order=Case(
            *[When(priority=p, then=Value(i)) for i, p in enumerate(priority_order)],
            output_field=CharField(),
        )
    ).order_by("priority_order")

    if issue_type == "pending":
        pending_issues_count = assigned_issues.filter(
            state__group__in=["backlog", "started", "unstarted"]
        ).count()
        pending_issues = assigned_issues.filter(
            state__group__in=["backlog", "started", "unstarted"]
        )[:5]
        return Response(
            {
                "issues": IssueSerializer(
                    pending_issues, many=True, expand=self.expand
                ).data,
                "count": pending_issues_count,
            },
            status=status.HTTP_200_OK,
        )

    if issue_type == "completed":
        completed_issues_count = assigned_issues.filter(
            state__group__in=["completed"]
        ).count()
        completed_issues = assigned_issues.filter(state__group__in=["completed"])[:5]
        return Response(
            {
                "issues": IssueSerializer(
                    completed_issues, many=True, expand=self.expand
                ).data,
                "count": completed_issues_count,
            },
            status=status.HTTP_200_OK,
        )

    if issue_type == "overdue":
        overdue_issues_count = assigned_issues.filter(
            state__group__in=["backlog", "unstarted", "started"],
            target_date__lt=timezone.now(),
        ).count()
        overdue_issues = assigned_issues.filter(
            state__group__in=["backlog", "unstarted", "started"],
            target_date__lt=timezone.now(),
        )[:5]
        return Response(
            {
                "issues": IssueSerializer(
                    overdue_issues, many=True, expand=self.expand
                ).data,
                "count": overdue_issues_count,
            },
            status=status.HTTP_200_OK,
        )

    if issue_type == "upcoming":
        upcoming_issues_count = assigned_issues.filter(
            state__group__in=["backlog", "unstarted", "started"],
            target_date__gte=timezone.now(),
        ).count()
        upcoming_issues = assigned_issues.filter(
            state__group__in=["backlog", "unstarted", "started"],
            target_date__gte=timezone.now(),
        )[:5]
        return Response(
            {
                "issues": IssueSerializer(
                    upcoming_issues, many=True, expand=self.expand
                ).data,
                "count": upcoming_issues_count,
            },
            status=status.HTTP_200_OK,
        )

    return Response(
        {"error": "Please specify a valid issue type"},
        status=status.HTTP_400_BAD_REQUEST,
    )


def dashboard_created_issues(self, request, slug):
    filters = issue_filters(request.query_params, "GET")
    issue_type = request.GET.get("issue_type", None)

    # get all the created issues
    created_issues = (
        Issue.issue_objects.filter(
            workspace__slug=slug,
            project__project_projectmember__member=request.user,
            project__project_projectmember__is_active=True,
            created_by=request.user,
        )
        .filter(**filters)
        .select_related("workspace", "project", "state", "parent")
        .prefetch_related("assignees", "labels", "issue_module__module")
        .annotate(
            cycle_id=Subquery(
                CycleIssue.objects.filter(
                    issue=OuterRef("id"), deleted_at__isnull=True
                ).values("cycle_id")[:1]
            )
        )
        .annotate(
            link_count=IssueLink.objects.filter(issue=OuterRef("id"))
            .order_by()
            .annotate(count=Func(F("id"), function="Count"))
            .values("count")
        )
        .annotate(
            attachment_count=FileAsset.objects.filter(
                issue_id=OuterRef("id"),
                entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
            )
            .order_by()
            .annotate(count=Func(F("id"), function="Count"))
            .values("count")
        )
        .annotate(
            sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
            .order_by()
            .annotate(count=Func(F("id"), function="Count"))
            .values("count")
        )
        .annotate(
            label_ids=Coalesce(
                ArrayAgg(
                    "labels__id",
                    distinct=True,
                    filter=Q(
                        ~Q(labels__id__isnull=True)
                        & Q(label_issue__deleted_at__isnull=True)
                    ),
                ),
                Value([], output_field=ArrayField(UUIDField())),
            ),
            assignee_ids=Coalesce(
                ArrayAgg(
                    "assignees__id",
                    distinct=True,
                    filter=Q(
                        ~Q(assignees__id__isnull=True)
                        & Q(assignees__member_project__is_active=True)
                        & Q(issue_assignee__deleted_at__isnull=True)
                    ),
                ),
                Value([], output_field=ArrayField(UUIDField())),
            ),
            module_ids=Coalesce(
                ArrayAgg(
                    "issue_module__module_id",
                    distinct=True,
                    filter=Q(
                        ~Q(issue_module__module_id__isnull=True)
                        & Q(issue_module__module__archived_at__isnull=True)
                        & Q(issue_module__deleted_at__isnull=True)
                    ),
                ),
                Value([], output_field=ArrayField(UUIDField())),
            ),
        )
        .order_by("created_at")
    )

    # Priority Ordering
    priority_order = ["urgent", "high", "medium", "low", "none"]
    created_issues = created_issues.annotate(
        priority_order=Case(
            *[When(priority=p, then=Value(i)) for i, p in enumerate(priority_order)],
            output_field=CharField(),
        )
    ).order_by("priority_order")

    if issue_type == "pending":
        pending_issues_count = created_issues.filter(
            state__group__in=["backlog", "started", "unstarted"]
        ).count()
        pending_issues = created_issues.filter(
            state__group__in=["backlog", "started", "unstarted"]
        )[:5]
        return Response(
            {
                "issues": IssueSerializer(
                    pending_issues, many=True, expand=self.expand
                ).data,
                "count": pending_issues_count,
            },
            status=status.HTTP_200_OK,
        )

    if issue_type == "completed":
        completed_issues_count = created_issues.filter(
            state__group__in=["completed"]
        ).count()
        completed_issues = created_issues.filter(state__group__in=["completed"])[:5]
        return Response(
            {
                "issues": IssueSerializer(completed_issues, many=True).data,
                "count": completed_issues_count,
            },
            status=status.HTTP_200_OK,
        )

    if issue_type == "overdue":
        overdue_issues_count = created_issues.filter(
            state__group__in=["backlog", "unstarted", "started"],
            target_date__lt=timezone.now(),
        ).count()
        overdue_issues = created_issues.filter(
            state__group__in=["backlog", "unstarted", "started"],
            target_date__lt=timezone.now(),
        )[:5]
        return Response(
            {
                "issues": IssueSerializer(overdue_issues, many=True).data,
                "count": overdue_issues_count,
            },
            status=status.HTTP_200_OK,
        )

    if issue_type == "upcoming":
        upcoming_issues_count = created_issues.filter(
            state__group__in=["backlog", "unstarted", "started"],
            target_date__gte=timezone.now(),
        ).count()
        upcoming_issues = created_issues.filter(
            state__group__in=["backlog", "unstarted", "started"],
            target_date__gte=timezone.now(),
        )[:5]
        return Response(
            {
                "issues": IssueSerializer(upcoming_issues, many=True).data,
                "count": upcoming_issues_count,
            },
            status=status.HTTP_200_OK,
        )

    return Response(
        {"error": "Please specify a valid issue type"},
        status=status.HTTP_400_BAD_REQUEST,
    )


def dashboard_issues_by_state_groups(self, request, slug):
    filters = issue_filters(request.query_params, "GET")
    state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
    extra_filters = {}

    if WorkspaceMember.objects.filter(
        workspace__slug=slug, member=request.user, role=5, is_active=True
    ).exists():
        extra_filters = {"created_by": request.user}

    issues_by_state_groups = (
        Issue.issue_objects.filter(
            workspace__slug=slug,
            project__project_projectmember__is_active=True,
            project__project_projectmember__member=request.user,
            assignees__in=[request.user],
        )
        .filter(**filters, **extra_filters)
        .values("state__group")
        .annotate(count=Count("id"))
    )

    # default state
    all_groups = {state: 0 for state in state_order}

    # Update counts for existing groups
    for entry in issues_by_state_groups:
        all_groups[entry["state__group"]] = entry["count"]

    # Prepare output including all groups with their counts
    output_data = [
        {"state": group, "count": count} for group, count in all_groups.items()
    ]

    return Response(output_data, status=status.HTTP_200_OK)
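

# Example response shape for the endpoint above (illustrative counts):
# [{"state": "backlog", "count": 3}, {"state": "unstarted", "count": 0}, ...]
# Every group in state_order appears even when its count is zero.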


def dashboard_issues_by_priority(self, request, slug):
    filters = issue_filters(request.query_params, "GET")
    priority_order = ["urgent", "high", "medium", "low", "none"]
    extra_filters = {}

    if WorkspaceMember.objects.filter(
        workspace__slug=slug, member=request.user, role=5, is_active=True
    ).exists():
        extra_filters = {"created_by": request.user}

    issues_by_priority = (
        Issue.issue_objects.filter(
            workspace__slug=slug,
            project__project_projectmember__is_active=True,
            project__project_projectmember__member=request.user,
            assignees__in=[request.user],
        )
        .filter(**filters, **extra_filters)
        .values("priority")
        .annotate(count=Count("id"))
    )

    # default priority
    all_groups = {priority: 0 for priority in priority_order}

    # Update counts for existing groups
    for entry in issues_by_priority:
        all_groups[entry["priority"]] = entry["count"]

    # Prepare output including all groups with their counts
    output_data = [
        {"priority": group, "count": count} for group, count in all_groups.items()
    ]

    return Response(output_data, status=status.HTTP_200_OK)


def dashboard_recent_activity(self, request, slug):
    queryset = IssueActivity.objects.filter(
        ~Q(field__in=["comment", "vote", "reaction", "draft"]),
        workspace__slug=slug,
        project__project_projectmember__member=request.user,
        project__project_projectmember__is_active=True,
        project__archived_at__isnull=True,
        actor=request.user,
    ).select_related("actor", "workspace", "issue", "project")[:8]

    return Response(
        IssueActivitySerializer(queryset, many=True).data, status=status.HTTP_200_OK
    )


def dashboard_recent_projects(self, request, slug):
    project_ids = (
        IssueActivity.objects.filter(
            workspace__slug=slug,
            project__project_projectmember__member=request.user,
            project__project_projectmember__is_active=True,
            project__archived_at__isnull=True,
            actor=request.user,
        )
        .values_list("project_id", flat=True)
        .distinct()
    )

    # Extract project IDs from the recent projects
    unique_project_ids = set(project_id for project_id in project_ids)

    # Fetch additional projects only if needed
    if len(unique_project_ids) < 4:
        additional_projects = Project.objects.filter(
            project_projectmember__member=request.user,
            project_projectmember__is_active=True,
            archived_at__isnull=True,
            workspace__slug=slug,
        ).exclude(id__in=unique_project_ids)

        # Append additional project IDs to the existing list
        unique_project_ids.update(additional_projects.values_list("id", flat=True))

    return Response(list(unique_project_ids)[:4], status=status.HTTP_200_OK)


def dashboard_recent_collaborators(self, request, slug):
    project_members_with_activities = (
        WorkspaceMember.objects.filter(workspace__slug=slug, is_active=True)
        .annotate(
            active_issue_count=Count(
                Case(
                    When(
                        member__issue_assignee__issue__state__group__in=[
                            "unstarted",
                            "started",
                        ],
                        member__issue_assignee__issue__workspace__slug=slug,
                        member__issue_assignee__issue__project__project_projectmember__member=request.user,
                        member__issue_assignee__issue__project__project_projectmember__is_active=True,
                        then=F("member__issue_assignee__issue__id"),
                    ),
                    distinct=True,
                    output_field=IntegerField(),
                ),
                distinct=True,
            ),
            user_id=F("member_id"),
        )
        .values("user_id", "active_issue_count")
        .order_by("-active_issue_count")
        .distinct()
    )
    return Response((project_members_with_activities), status=status.HTTP_200_OK)
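

# The Count(Case(When(...))) pattern above counts, per workspace member, the
# distinct ids of their assigned issues in the "unstarted"/"started" groups;
# rows that match no When() yield NULL, which Count ignores, so no separate
# filter argument is needed.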


class DashboardEndpoint(BaseAPIView):
    def create(self, request, slug):
        serializer = DashboardSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def patch(self, request, slug, pk):
        serializer = DashboardSerializer(data=request.data, partial=True)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def delete(self, request, slug, pk):
        serializer = DashboardSerializer(data=request.data, partial=True)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_204_NO_CONTENT)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def get(self, request, slug, dashboard_id=None):
        if not dashboard_id:
            dashboard_type = request.GET.get("dashboard_type", None)
            if dashboard_type == "home":
                dashboard, created = Dashboard.objects.get_or_create(
                    type_identifier=dashboard_type,
                    owned_by=request.user,
                    is_default=True,
                )

                if created:
                    widgets_to_fetch = [
                        "overview_stats",
                        "assigned_issues",
                        "created_issues",
                        "issues_by_state_groups",
                        "issues_by_priority",
                        "recent_activity",
                        "recent_projects",
                        "recent_collaborators",
                    ]

                    updated_dashboard_widgets = []
                    for widget_key in widgets_to_fetch:
                        widget = Widget.objects.filter(key=widget_key).values_list(
                            "id", flat=True
                        ).first()
                        if widget:
                            updated_dashboard_widgets.append(
                                DashboardWidget(
                                    widget_id=widget, dashboard_id=dashboard.id
                                )
                            )

                    DashboardWidget.objects.bulk_create(
                        updated_dashboard_widgets, batch_size=100
                    )

                widgets = (
                    Widget.objects.annotate(
                        is_visible=Exists(
                            DashboardWidget.objects.filter(
                                widget_id=OuterRef("pk"),
                                dashboard_id=dashboard.id,
                                is_visible=True,
                            )
                        )
                    )
                    .annotate(
                        dashboard_filters=Subquery(
                            DashboardWidget.objects.filter(
                                widget_id=OuterRef("pk"),
                                dashboard_id=dashboard.id,
                                filters__isnull=False,
                            )
                            .exclude(filters={})
                            .values("filters")[:1]
                        )
                    )
                    .annotate(
                        widget_filters=Case(
                            When(
                                dashboard_filters__isnull=False,
                                then=F("dashboard_filters"),
                            ),
                            default=F("filters"),
                            output_field=JSONField(),
                        )
                    )
                )
                return Response(
                    {
                        "dashboard": DashboardSerializer(dashboard).data,
                        "widgets": WidgetSerializer(widgets, many=True).data,
                    },
                    status=status.HTTP_200_OK,
                )
            return Response(
                {"error": "Please specify a valid dashboard type"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        widget_key = request.GET.get("widget_key", "overview_stats")

        WIDGETS_MAPPER = {
            "overview_stats": dashboard_overview_stats,
            "assigned_issues": dashboard_assigned_issues,
            "created_issues": dashboard_created_issues,
            "issues_by_state_groups": dashboard_issues_by_state_groups,
            "issues_by_priority": dashboard_issues_by_priority,
            "recent_activity": dashboard_recent_activity,
            "recent_projects": dashboard_recent_projects,
            "recent_collaborators": dashboard_recent_collaborators,
        }
|
||||
|
||||
func = WIDGETS_MAPPER.get(widget_key)
|
||||
if func is not None:
|
||||
response = func(self, request=request, slug=slug)
|
||||
if isinstance(response, Response):
|
||||
return response
|
||||
|
||||
return Response(
|
||||
{"error": "Please specify a valid widget key"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
|
||||
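With WIDGETS_MAPPER in place, each widget's data comes through the same endpoint with a single query parameter. A hypothetical client call; the host, path shape, and auth header are assumptions, not taken from this diff:

import requests

resp = requests.get(
    "https://plane.example.com/api/workspaces/my-workspace/dashboard/",
    params={"widget_key": "assigned_issues"},  # any key present in WIDGETS_MAPPER
    headers={"Authorization": "Bearer <token>"},  # placeholder credentials
)
resp.raise_for_status()
print(resp.json())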
class WidgetsEndpoint(BaseAPIView):
|
||||
def patch(self, request, dashboard_id, widget_id):
|
||||
        dashboard_widget = DashboardWidget.objects.filter(
            widget_id=widget_id, dashboard_id=dashboard_id
        ).first()
        if dashboard_widget is None:  # filter().first() returns None when absent
            return Response({"error": "Widget not found"}, status=status.HTTP_404_NOT_FOUND)
|
||||
dashboard_widget.is_visible = request.data.get(
|
||||
"is_visible", dashboard_widget.is_visible
|
||||
)
|
||||
dashboard_widget.sort_order = request.data.get(
|
||||
"sort_order", dashboard_widget.sort_order
|
||||
)
|
||||
dashboard_widget.filters = request.data.get("filters", dashboard_widget.filters)
|
||||
dashboard_widget.save()
|
||||
return Response({"message": "successfully updated"}, status=status.HTTP_200_OK)
|
||||
apiserver/plane/app/views/external/base.py (vendored, 220 changed lines)
@@ -1,71 +1,171 @@
|
||||
-# Python imports
-import requests
+# Python import
 import os
+from typing import List, Dict, Tuple

-# Third party imports
+# Third party import
 from openai import OpenAI
-from rest_framework.response import Response
+import requests

 from rest_framework import status
+from rest_framework.response import Response

-# Django imports
-
-# Module imports
-from ..base import BaseAPIView
-from plane.app.permissions import allow_permission, ROLE
-from plane.db.models import Workspace, Project
-from plane.app.serializers import ProjectLiteSerializer, WorkspaceLiteSerializer
+# Module import
+from plane.app.permissions import ROLE, allow_permission
+from plane.app.serializers import (ProjectLiteSerializer,
+                                   WorkspaceLiteSerializer)
+from plane.db.models import Project, Workspace
 from plane.license.utils.instance_value import get_configuration_value
+from plane.utils.exception_logger import log_exception
+
+from ..base import BaseAPIView
|
||||
|
||||
|
||||
class LLMProvider:
|
||||
"""Base class for LLM provider configurations"""
|
||||
name: str = ""
|
||||
models: List[str] = []
|
||||
default_model: str = ""
|
||||
|
||||
@classmethod
|
||||
def get_config(cls) -> Dict[str, str | List[str]]:
|
||||
return {
|
||||
"name": cls.name,
|
||||
"models": cls.models,
|
||||
"default_model": cls.default_model,
|
||||
}
|
||||
|
||||
class OpenAIProvider(LLMProvider):
|
||||
name = "OpenAI"
|
||||
models = ["gpt-3.5-turbo", "gpt-4o-mini", "gpt-4o", "o1-mini", "o1-preview"]
|
||||
default_model = "gpt-4o-mini"
|
||||
|
||||
class AnthropicProvider(LLMProvider):
|
||||
name = "Anthropic"
|
||||
models = [
|
||||
"claude-3-5-sonnet-20240620",
|
||||
"claude-3-haiku-20240307",
|
||||
"claude-3-opus-20240229",
|
||||
"claude-3-sonnet-20240229",
|
||||
"claude-2.1",
|
||||
"claude-2",
|
||||
"claude-instant-1.2",
|
||||
"claude-instant-1"
|
||||
]
|
||||
default_model = "claude-3-sonnet-20240229"
|
||||
|
||||
class GeminiProvider(LLMProvider):
|
||||
name = "Gemini"
|
||||
models = ["gemini-pro", "gemini-1.5-pro-latest", "gemini-pro-vision"]
|
||||
default_model = "gemini-pro"
|
||||
|
||||
SUPPORTED_PROVIDERS = {
|
||||
"openai": OpenAIProvider,
|
||||
"anthropic": AnthropicProvider,
|
||||
"gemini": GeminiProvider,
|
||||
}
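Each provider class doubles as a registry entry, so resolving a key and reading its configuration is a dictionary lookup plus get_config(). A short usage sketch:

provider_cls = SUPPORTED_PROVIDERS.get("anthropic")
if provider_cls is not None:
    config = provider_cls.get_config()
    # e.g. config["default_model"] == "claude-3-sonnet-20240229"
    assert config["default_model"] in config["models"]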
|
||||
|
||||
def get_llm_config() -> Tuple[str | None, str | None, str | None]:
|
||||
"""
|
||||
Helper to get LLM configuration values, returns:
|
||||
- api_key, model, provider
|
||||
"""
|
||||
api_key, provider_key, model = get_configuration_value([
|
||||
{
|
||||
"key": "LLM_API_KEY",
|
||||
"default": os.environ.get("LLM_API_KEY", None),
|
||||
},
|
||||
{
|
||||
"key": "LLM_PROVIDER",
|
||||
"default": os.environ.get("LLM_PROVIDER", "openai"),
|
||||
},
|
||||
{
|
||||
"key": "LLM_MODEL",
|
||||
"default": os.environ.get("LLM_MODEL", None),
|
||||
},
|
||||
])
|
||||
|
||||
provider = SUPPORTED_PROVIDERS.get(provider_key.lower())
|
||||
if not provider:
|
||||
log_exception(ValueError(f"Unsupported provider: {provider_key}"))
|
||||
return None, None, None
|
||||
|
||||
if not api_key:
|
||||
log_exception(ValueError(f"Missing API key for provider: {provider.name}"))
|
||||
return None, None, None
|
||||
|
||||
# If no model specified, use provider's default
|
||||
if not model:
|
||||
model = provider.default_model
|
||||
|
||||
# Validate model is supported by provider
|
||||
if model not in provider.models:
|
||||
log_exception(ValueError(
|
||||
f"Model {model} not supported by {provider.name}. "
|
||||
f"Supported models: {', '.join(provider.models)}"
|
||||
))
|
||||
return None, None, None
|
||||
|
||||
return api_key, model, provider_key
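Callers treat a (None, None, None) triple as "LLM not configured"; a hedged usage sketch:

api_key, model, provider = get_llm_config()
if not (api_key and model and provider):
    # The specific misconfiguration was already logged via log_exception above.
    raise RuntimeError("LLM is not configured")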
|
||||
|
||||
|
||||
def get_llm_response(task, prompt, api_key: str, model: str, provider: str) -> Tuple[str | None, str | None]:
|
||||
"""Helper to get LLM completion response"""
|
||||
    # prompt may be False when the caller omitted it; coerce to an empty string
    final_text = task + "\n" + (prompt or "")
|
||||
try:
|
||||
# For Gemini, prepend provider name to model
|
||||
if provider.lower() == "gemini":
|
||||
model = f"gemini/{model}"
|
||||
|
||||
client = OpenAI(api_key=api_key)
|
||||
chat_completion = client.chat.completions.create(
|
||||
model=model,
|
||||
messages=[
|
||||
{"role": "user", "content": final_text}
|
||||
]
|
||||
)
|
||||
text = chat_completion.choices[0].message.content
|
||||
return text, None
|
||||
except Exception as e:
|
||||
log_exception(e)
|
||||
error_type = e.__class__.__name__
|
||||
if error_type == "AuthenticationError":
|
||||
return None, f"Invalid API key for {provider}"
|
||||
elif error_type == "RateLimitError":
|
||||
return None, f"Rate limit exceeded for {provider}"
|
||||
else:
|
||||
return None, f"Error occurred while generating response from {provider}"
|
||||
|
||||
class GPTIntegrationEndpoint(BaseAPIView):
|
||||
@allow_permission([ROLE.ADMIN, ROLE.MEMBER])
|
||||
def post(self, request, slug, project_id):
|
||||
-        OPENAI_API_KEY, GPT_ENGINE = get_configuration_value(
-            [
-                {
-                    "key": "OPENAI_API_KEY",
-                    "default": os.environ.get("OPENAI_API_KEY", None),
-                },
-                {
-                    "key": "GPT_ENGINE",
-                    "default": os.environ.get("GPT_ENGINE", "gpt-3.5-turbo"),
-                },
-            ]
-        )
+        api_key, model, provider = get_llm_config()

         # Get the configuration value
         # Check the keys
-        if not OPENAI_API_KEY or not GPT_ENGINE:
+        if not api_key or not model or not provider:
             return Response(
-                {"error": "OpenAI API key and engine is required"},
+                {"error": "LLM provider API key and model are required"},
                 status=status.HTTP_400_BAD_REQUEST,
             )

         prompt = request.data.get("prompt", False)
         task = request.data.get("task", False)

         if not task:
             return Response(
                 {"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST
             )

-        final_text = task + "\n" + prompt
-
-        client = OpenAI(api_key=OPENAI_API_KEY)
-
-        response = client.chat.completions.create(
-            model=GPT_ENGINE, messages=[{"role": "user", "content": final_text}]
-        )
+        text, error = get_llm_response(task, request.data.get("prompt", False), api_key, model, provider)
+        if not text and error:
+            return Response(
+                {"error": "An internal error has occurred."},
+                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            )

         workspace = Workspace.objects.get(slug=slug)
         project = Project.objects.get(pk=project_id)

-        text = response.choices[0].message.content.strip()
-        text_html = text.replace("\n", "<br/>")
         return Response(
             {
                 "response": text,
-                "response_html": text_html,
+                "response_html": text.replace("\n", "<br/>"),
                 "project_detail": ProjectLiteSerializer(project).data,
                 "workspace_detail": WorkspaceLiteSerializer(workspace).data,
             },
|
||||
@@ -76,47 +176,33 @@ class GPTIntegrationEndpoint(BaseAPIView):
|
||||
class WorkspaceGPTIntegrationEndpoint(BaseAPIView):
|
||||
@allow_permission(allowed_roles=[ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE")
|
||||
def post(self, request, slug):
|
||||
-        OPENAI_API_KEY, GPT_ENGINE = get_configuration_value(
-            [
-                {
-                    "key": "OPENAI_API_KEY",
-                    "default": os.environ.get("OPENAI_API_KEY", None),
-                },
-                {
-                    "key": "GPT_ENGINE",
-                    "default": os.environ.get("GPT_ENGINE", "gpt-3.5-turbo"),
-                },
-            ]
-        )
+        api_key, model, provider = get_llm_config()

         # Get the configuration value
         # Check the keys
-        if not OPENAI_API_KEY or not GPT_ENGINE:
+        if not api_key or not model or not provider:
             return Response(
-                {"error": "OpenAI API key and engine is required"},
+                {"error": "LLM provider API key and model are required"},
                 status=status.HTTP_400_BAD_REQUEST,
             )

         prompt = request.data.get("prompt", False)
         task = request.data.get("task", False)

         if not task:
             return Response(
                 {"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST
             )

-        final_text = task + "\n" + prompt
+        text, error = get_llm_response(task, request.data.get("prompt", False), api_key, model, provider)
+        if not text and error:
+            return Response(
+                {"error": "An internal error has occurred."},
+                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            )

-        client = OpenAI(api_key=OPENAI_API_KEY)
-
-        response = client.chat.completions.create(
-            model=GPT_ENGINE, messages=[{"role": "user", "content": final_text}]
-        )
-
-        text = response.choices[0].message.content.strip()
-        text_html = text.replace("\n", "<br/>")
         return Response(
-            {"response": text, "response_html": text_html}, status=status.HTTP_200_OK
+            {
+                "response": text,
+                "response_html": text.replace("\n", "<br/>"),
+            },
+            status=status.HTTP_200_OK,
         )
|
||||
|
||||
|
||||
|
||||
@@ -37,6 +37,7 @@ from plane.app.serializers import (
|
||||
)
|
||||
from plane.utils.issue_filters import issue_filters
|
||||
from plane.bgtasks.issue_activities_task import issue_activity
|
||||
from plane.utils.host import base_host
|
||||
|
||||
|
||||
class IntakeViewSet(BaseViewSet):
|
||||
@@ -174,14 +175,19 @@ class IntakeIssueViewSet(BaseViewSet):
|
||||
|
||||
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
|
||||
def list(self, request, slug, project_id):
|
||||
-        intake_id = Intake.objects.filter(
+        intake = Intake.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id
|
||||
).first()
|
||||
if not intake:
|
||||
return Response(
|
||||
{"error": "Intake not found"}, status=status.HTTP_404_NOT_FOUND
|
||||
)
|
||||
|
||||
project = Project.objects.get(pk=project_id)
|
||||
filters = issue_filters(request.GET, "GET", "issue__")
|
||||
intake_issue = (
|
||||
IntakeIssue.objects.filter(
|
||||
-                intake_id=intake_id.id, project_id=project_id, **filters
+                intake_id=intake.id, project_id=project_id, **filters
|
||||
)
|
||||
.select_related("issue")
|
||||
.prefetch_related("issue__labels")
|
||||
@@ -278,7 +284,7 @@ class IntakeIssueViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
intake=str(intake_issue.id),
|
||||
)
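Across these hunks, origin=request.META.get("HTTP_ORIGIN") (the old line) is replaced by origin=base_host(request=request, is_app=True) (the new line), so the activity origin is derived server-side instead of trusting a client-supplied header. The real helper lives in plane/utils/host.py and is not shown in this diff; a purely hypothetical sketch of the shape such a helper can take, with the environment variable names being assumptions:

import os

def base_host(request, is_app: bool = False) -> str:
    # Hypothetical: prefer an explicitly configured base URL for the app,
    # falling back to the scheme and host of the incoming request.
    configured = os.environ.get("APP_BASE_URL" if is_app else "WEB_URL")
    if configured:
        return configured
    scheme = "https" if request.is_secure() else "http"
    return f"{scheme}://{request.get_host()}"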
|
||||
intake_issue = (
|
||||
@@ -382,7 +388,7 @@ class IntakeIssueViewSet(BaseViewSet):
|
||||
}
|
||||
|
||||
issue_serializer = IssueCreateSerializer(
|
||||
-            issue, data=issue_data, partial=True
+            issue, data=issue_data, partial=True, context={"project_id": project_id}
|
||||
)
|
||||
|
||||
if issue_serializer.is_valid():
|
||||
@@ -402,7 +408,7 @@ class IntakeIssueViewSet(BaseViewSet):
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
intake=str(intake_issue.id),
|
||||
)
|
||||
issue_serializer.save()
|
||||
@@ -462,7 +468,7 @@ class IntakeIssueViewSet(BaseViewSet):
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=False,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
intake=str(intake_issue.id),
|
||||
)
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@ from rest_framework import status
|
||||
from .. import BaseAPIView
|
||||
from plane.app.serializers import IssueActivitySerializer, IssueCommentSerializer
|
||||
from plane.app.permissions import ProjectEntityPermission, allow_permission, ROLE
|
||||
-from plane.db.models import IssueActivity, IssueComment, CommentReaction
+from plane.db.models import IssueActivity, IssueComment, CommentReaction, IntakeIssue
|
||||
|
||||
|
||||
class IssueActivityEndpoint(BaseAPIView):
|
||||
@@ -57,13 +57,22 @@ class IssueActivityEndpoint(BaseAPIView):
|
||||
)
|
||||
)
|
||||
)
|
||||
-        issue_activities = IssueActivitySerializer(issue_activities, many=True).data
-        issue_comments = IssueCommentSerializer(issue_comments, many=True).data
-
         if request.GET.get("activity_type", None) == "issue-property":
+            issue_activities = issue_activities.prefetch_related(
+                Prefetch(
+                    "issue__issue_intake",
+                    queryset=IntakeIssue.objects.only(
+                        "source_email", "source", "extra"
+                    ),
+                    to_attr="source_data",
+                )
+            )
+            issue_activities = IssueActivitySerializer(issue_activities, many=True).data
             return Response(issue_activities, status=status.HTTP_200_OK)

         if request.GET.get("activity_type", None) == "issue-comment":
+            issue_comments = IssueCommentSerializer(issue_comments, many=True).data
             return Response(issue_comments, status=status.HTTP_200_OK)
|
||||
|
||||
result_list = sorted(
|
||||
|
||||
@@ -37,7 +37,7 @@ from plane.utils.order_queryset import order_issue_queryset
|
||||
from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPaginator
|
||||
from plane.app.permissions import allow_permission, ROLE
|
||||
from plane.utils.error_codes import ERROR_CODES
|
||||
|
||||
from plane.utils.host import base_host
|
||||
# Module imports
|
||||
from .. import BaseViewSet, BaseAPIView
|
||||
|
||||
@@ -259,7 +259,7 @@ class IssueArchiveViewSet(BaseViewSet):
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
issue.archived_at = timezone.now().date()
|
||||
issue.save()
|
||||
@@ -287,7 +287,7 @@ class IssueArchiveViewSet(BaseViewSet):
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
issue.archived_at = None
|
||||
issue.save()
|
||||
@@ -333,7 +333,7 @@ class BulkArchiveIssuesEndpoint(BaseAPIView):
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
issue.archived_at = timezone.now().date()
|
||||
bulk_archive_issues.append(issue)
|
||||
|
||||
@@ -21,7 +21,7 @@ from plane.bgtasks.issue_activities_task import issue_activity
|
||||
from plane.app.permissions import allow_permission, ROLE
|
||||
from plane.settings.storage import S3Storage
|
||||
from plane.bgtasks.storage_metadata_task import get_asset_object_metadata
|
||||
|
||||
from plane.utils.host import base_host
|
||||
|
||||
class IssueAttachmentEndpoint(BaseAPIView):
|
||||
serializer_class = IssueAttachmentSerializer
|
||||
@@ -48,7 +48,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
|
||||
current_instance=json.dumps(serializer.data, cls=DjangoJSONEncoder),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
@@ -67,7 +67,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
@@ -120,10 +120,12 @@ class IssueAttachmentV2Endpoint(BaseAPIView):
|
||||
|
||||
# Get the presigned URL
|
||||
storage = S3Storage(request=request)
|
||||
|
||||
# Generate a presigned URL to share an S3 object
|
||||
presigned_url = storage.generate_presigned_post(
|
||||
object_name=asset_key, file_type=type, file_size=size_limit
|
||||
)
|
||||
|
||||
# Return the presigned URL
|
||||
return Response(
|
||||
{
|
||||
@@ -153,7 +155,7 @@ class IssueAttachmentV2Endpoint(BaseAPIView):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
@@ -211,7 +213,7 @@ class IssueAttachmentV2Endpoint(BaseAPIView):
|
||||
current_instance=json.dumps(serializer.data, cls=DjangoJSONEncoder),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
|
||||
# Update the attachment
|
||||
|
||||
@@ -44,6 +44,8 @@ from plane.db.models import (
|
||||
Project,
|
||||
ProjectMember,
|
||||
CycleIssue,
|
||||
UserRecentVisit,
|
||||
ModuleIssue,
|
||||
)
|
||||
from plane.utils.grouper import (
|
||||
issue_group_values,
|
||||
@@ -59,7 +61,7 @@ from plane.bgtasks.recent_visited_task import recent_visited_task
|
||||
from plane.utils.global_paginator import paginate
|
||||
from plane.bgtasks.webhook_task import model_activity
|
||||
from plane.bgtasks.issue_description_version_task import issue_description_version_task
|
||||
|
||||
from plane.utils.host import base_host
|
||||
|
||||
class IssueListEndpoint(BaseAPIView):
|
||||
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
|
||||
@@ -377,7 +379,7 @@ class IssueViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
issue = (
|
||||
issue_queryset_grouper(
|
||||
@@ -427,7 +429,7 @@ class IssueViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
actor_id=request.user.id,
|
||||
slug=slug,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
# updated issue description version
|
||||
issue_description_version_task.delay(
|
||||
@@ -546,7 +548,7 @@ class IssueViewSet(BaseViewSet):
|
||||
)
|
||||
|
||||
"""
|
||||
if the role is guest and guest_view_all_features is false and owned by is not
the requesting user, then don't show the issue
|
||||
"""
|
||||
|
||||
@@ -634,7 +636,9 @@ class IssueViewSet(BaseViewSet):
|
||||
)
|
||||
|
||||
requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
|
||||
-        serializer = IssueCreateSerializer(issue, data=request.data, partial=True)
+        serializer = IssueCreateSerializer(
+            issue, data=request.data, partial=True, context={"project_id": project_id}
+        )
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
issue_activity.delay(
|
||||
@@ -646,7 +650,7 @@ class IssueViewSet(BaseViewSet):
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
model_activity.delay(
|
||||
model_name="issue",
|
||||
@@ -655,7 +659,7 @@ class IssueViewSet(BaseViewSet):
|
||||
current_instance=current_instance,
|
||||
actor_id=request.user.id,
|
||||
slug=slug,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
# updated issue description version
|
||||
issue_description_version_task.delay(
|
||||
@@ -671,6 +675,13 @@ class IssueViewSet(BaseViewSet):
|
||||
issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
|
||||
|
||||
issue.delete()
|
||||
# delete the issue from recent visits
|
||||
UserRecentVisit.objects.filter(
|
||||
project_id=project_id,
|
||||
workspace__slug=slug,
|
||||
entity_identifier=pk,
|
||||
entity_name="issue",
|
||||
).delete(soft=False)
|
||||
issue_activity.delay(
|
||||
type="issue.activity.deleted",
|
||||
requested_data=json.dumps({"issue_id": str(pk)}),
|
||||
@@ -680,7 +691,8 @@ class IssueViewSet(BaseViewSet):
|
||||
current_instance={},
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
subscriber=False,
|
||||
)
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@@ -728,6 +740,13 @@ class BulkDeleteIssuesEndpoint(BaseAPIView):
|
||||
|
||||
total_issues = len(issues)
|
||||
|
||||
# First, delete all related cycle issues
|
||||
CycleIssue.objects.filter(issue_id__in=issue_ids).delete()
|
||||
|
||||
# Then, delete all related module issues
|
||||
ModuleIssue.objects.filter(issue_id__in=issue_ids).delete()
|
||||
|
||||
# Finally, delete the issues themselves
|
||||
issues.delete()
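The three deletes above run as separate statements; wrapping them in a transaction (an addition sketched here, not something this diff does) would make the cleanup all-or-nothing if one step fails:

from django.db import transaction

with transaction.atomic():
    # Either all related rows and the issues go, or none do.
    CycleIssue.objects.filter(issue_id__in=issue_ids).delete()
    ModuleIssue.objects.filter(issue_id__in=issue_ids).delete()
    issues.delete()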
|
||||
|
||||
return Response(
|
||||
@@ -1088,3 +1107,188 @@ class IssueBulkUpdateDateEndpoint(BaseAPIView):
|
||||
return Response(
|
||||
{"message": "Issues updated successfully"}, status=status.HTTP_200_OK
|
||||
)
|
||||
|
||||
|
||||
class IssueMetaEndpoint(BaseAPIView):
|
||||
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST], level="PROJECT")
|
||||
def get(self, request, slug, project_id, issue_id):
|
||||
issue = Issue.issue_objects.only("sequence_id", "project__identifier").get(
|
||||
id=issue_id, project_id=project_id, workspace__slug=slug
|
||||
)
|
||||
return Response(
|
||||
{
|
||||
"sequence_id": issue.sequence_id,
|
||||
"project_identifier": issue.project.identifier,
|
||||
},
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
|
||||
class IssueDetailIdentifierEndpoint(BaseAPIView):
|
||||
def strict_str_to_int(self, s):
|
||||
if not s.isdigit() and not (s.startswith("-") and s[1:].isdigit()):
|
||||
raise ValueError("Invalid integer string")
|
||||
return int(s)
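strict_str_to_int is deliberately stricter than int(), which also tolerates surrounding whitespace and digit-group underscores. A few concrete cases, with the method receiver elided for brevity:

assert strict_str_to_int("42") == 42    # plain digits pass
assert strict_str_to_int("-7") == -7    # a single leading minus passes
# int(" 42 ") == 42 and int("1_000") == 1000, but both raise ValueError here.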
|
||||
|
||||
def get(self, request, slug, project_identifier, issue_identifier):
|
||||
# Check if the issue identifier is a valid integer
|
||||
try:
|
||||
issue_identifier = self.strict_str_to_int(issue_identifier)
|
||||
except ValueError:
|
||||
return Response(
|
||||
{"error": "Invalid issue identifier"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Fetch the project
|
||||
project = Project.objects.get(
|
||||
identifier__iexact=project_identifier, workspace__slug=slug
|
||||
)
|
||||
|
||||
# Check if the user is a member of the project
|
||||
if not ProjectMember.objects.filter(
|
||||
workspace__slug=slug,
|
||||
project_id=project.id,
|
||||
member=request.user,
|
||||
is_active=True,
|
||||
).exists():
|
||||
return Response(
|
||||
{"error": "You are not allowed to view this issue"},
|
||||
status=status.HTTP_403_FORBIDDEN,
|
||||
)
|
||||
|
||||
# Fetch the issue
|
||||
issue = (
|
||||
Issue.issue_objects.filter(project_id=project.id)
|
||||
.filter(workspace__slug=slug)
|
||||
.select_related("workspace", "project", "state", "parent")
|
||||
.prefetch_related("assignees", "labels", "issue_module__module")
|
||||
.annotate(
|
||||
cycle_id=Subquery(
|
||||
CycleIssue.objects.filter(issue=OuterRef("id")).values("cycle_id")[
|
||||
:1
|
||||
]
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
attachment_count=FileAsset.objects.filter(
|
||||
issue_id=OuterRef("id"),
|
||||
entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.filter(sequence_id=issue_identifier)
|
||||
.annotate(
|
||||
label_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"labels__id",
|
||||
distinct=True,
|
||||
filter=Q(
|
||||
~Q(labels__id__isnull=True)
|
||||
& Q(label_issue__deleted_at__isnull=True)
|
||||
),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
assignee_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"assignees__id",
|
||||
distinct=True,
|
||||
filter=Q(
|
||||
~Q(assignees__id__isnull=True)
|
||||
& Q(assignees__member_project__is_active=True)
|
||||
& Q(issue_assignee__deleted_at__isnull=True)
|
||||
),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
module_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"issue_module__module_id",
|
||||
distinct=True,
|
||||
filter=Q(
|
||||
~Q(issue_module__module_id__isnull=True)
|
||||
& Q(issue_module__module__archived_at__isnull=True)
|
||||
& Q(issue_module__deleted_at__isnull=True)
|
||||
),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
)
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"issue_reactions",
|
||||
queryset=IssueReaction.objects.select_related("issue", "actor"),
|
||||
)
|
||||
)
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"issue_link",
|
||||
queryset=IssueLink.objects.select_related("created_by"),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
is_subscribed=Exists(
|
||||
IssueSubscriber.objects.filter(
|
||||
workspace__slug=slug,
|
||||
project_id=project.id,
|
||||
issue__sequence_id=issue_identifier,
|
||||
subscriber=request.user,
|
||||
)
|
||||
)
|
||||
)
|
||||
).first()
|
||||
|
||||
# Check if the issue exists
|
||||
if not issue:
|
||||
return Response(
|
||||
{"error": "The required object does not exist."},
|
||||
status=status.HTTP_404_NOT_FOUND,
|
||||
)
|
||||
|
||||
"""
|
||||
if the role is guest and guest_view_all_features is false and owned by is not
the requesting user, then don't show the issue
|
||||
"""
|
||||
|
||||
if (
|
||||
ProjectMember.objects.filter(
|
||||
workspace__slug=slug,
|
||||
project_id=project.id,
|
||||
member=request.user,
|
||||
role=5,
|
||||
is_active=True,
|
||||
).exists()
|
||||
and not project.guest_view_all_features
|
||||
and not issue.created_by == request.user
|
||||
):
|
||||
return Response(
|
||||
{"error": "You are not allowed to view this issue"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
recent_visited_task.delay(
|
||||
slug=slug,
|
||||
entity_name="issue",
|
||||
entity_identifier=str(issue.id),
|
||||
user_id=str(request.user.id),
|
||||
project_id=str(project.id),
|
||||
)
|
||||
|
||||
# Serialize the issue
|
||||
serializer = IssueDetailSerializer(issue, expand=self.expand)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
@@ -5,6 +5,7 @@ import json
|
||||
from django.utils import timezone
|
||||
from django.db.models import Exists
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
from django.db import IntegrityError
|
||||
|
||||
# Third Party imports
|
||||
from rest_framework.response import Response
|
||||
@@ -16,7 +17,7 @@ from plane.app.serializers import IssueCommentSerializer, CommentReactionSeriali
|
||||
from plane.app.permissions import allow_permission, ROLE
|
||||
from plane.db.models import IssueComment, ProjectMember, CommentReaction, Project, Issue
|
||||
from plane.bgtasks.issue_activities_task import issue_activity
|
||||
|
||||
from plane.utils.host import base_host
|
||||
|
||||
class IssueCommentViewSet(BaseViewSet):
|
||||
serializer_class = IssueCommentSerializer
|
||||
@@ -86,7 +87,7 @@ class IssueCommentViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
@@ -104,7 +105,13 @@ class IssueCommentViewSet(BaseViewSet):
|
||||
issue_comment, data=request.data, partial=True
|
||||
)
|
||||
if serializer.is_valid():
|
||||
-            serializer.save()
+            if (
+                "comment_html" in request.data
+                and request.data["comment_html"] != issue_comment.comment_html
+            ):
+                serializer.save(edited_at=timezone.now())
+            else:
+                serializer.save()
|
||||
issue_activity.delay(
|
||||
type="comment.activity.updated",
|
||||
requested_data=requested_data,
|
||||
@@ -114,7 +121,7 @@ class IssueCommentViewSet(BaseViewSet):
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
@@ -137,7 +144,7 @@ class IssueCommentViewSet(BaseViewSet):
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@@ -164,24 +171,32 @@ class CommentReactionViewSet(BaseViewSet):
|
||||
|
||||
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
|
||||
def create(self, request, slug, project_id, comment_id):
|
||||
try:
|
||||
serializer = CommentReactionSerializer(data=request.data)
|
||||
if serializer.is_valid():
|
||||
serializer.save(
|
||||
project_id=project_id,
|
||||
actor_id=request.user.id,
|
||||
comment_id=comment_id,
|
||||
)
|
||||
issue_activity.delay(
|
||||
type="comment_reaction.activity.created",
|
||||
requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=None,
|
||||
project_id=str(project_id),
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
except IntegrityError:
|
||||
return Response(
|
||||
{"error": "Reaction already exists for the user"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
|
||||
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
|
||||
def destroy(self, request, slug, project_id, comment_id, reaction_code):
|
||||
@@ -207,7 +222,7 @@ class CommentReactionViewSet(BaseViewSet):
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
comment_reaction.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@@ -35,7 +35,9 @@ class LabelViewSet(BaseViewSet):
|
||||
.order_by("sort_order")
|
||||
)
|
||||
|
||||
-    @invalidate_cache(path="/api/workspaces/:slug/labels/", url_params=True, user=False)
+    @invalidate_cache(
+        path="/api/workspaces/:slug/labels/", url_params=True, user=False, multiple=True
+    )
|
||||
@allow_permission([ROLE.ADMIN])
|
||||
def create(self, request, slug, project_id):
|
||||
try:
|
||||
@@ -53,6 +55,20 @@ class LabelViewSet(BaseViewSet):
|
||||
@invalidate_cache(path="/api/workspaces/:slug/labels/", url_params=True, user=False)
|
||||
@allow_permission([ROLE.ADMIN])
|
||||
def partial_update(self, request, *args, **kwargs):
|
||||
# Check if the label name is unique within the project
|
||||
if (
|
||||
"name" in request.data
|
||||
and Label.objects.filter(
|
||||
project_id=kwargs["project_id"], name=request.data["name"]
|
||||
)
|
||||
.exclude(pk=kwargs["pk"])
|
||||
.exists()
|
||||
):
|
||||
return Response(
|
||||
{"error": "Label with the same name already exists in the project"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
# call the parent method to perform the update
|
||||
return super().partial_update(request, *args, **kwargs)
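The check above compares names case-sensitively, so "Bug" and "bug" can coexist. If case-insensitive collisions should also be rejected, a name__iexact variant (an assumption, not what this diff implements) would be:

duplicate = (
    Label.objects.filter(
        project_id=kwargs["project_id"], name__iexact=request.data["name"]
    )
    .exclude(pk=kwargs["pk"])
    .exists()
)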
|
||||
|
||||
@invalidate_cache(path="/api/workspaces/:slug/labels/", url_params=True, user=False)
|
||||
@@ -72,7 +88,7 @@ class BulkCreateIssueLabelsEndpoint(BaseAPIView):
|
||||
Label(
|
||||
name=label.get("name", "Migrated"),
|
||||
description=label.get("description", "Migrated Issue"),
|
||||
color=f"#{random.randint(0, 0xFFFFFF+1):06X}",
|
||||
color=f"#{random.randint(0, 0xFFFFFF + 1):06X}",
|
||||
project_id=project_id,
|
||||
workspace_id=project.workspace_id,
|
||||
created_by=request.user,
|
||||
|
||||
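One caveat on the color line in this hunk: random.randint is inclusive at both ends, so an upper bound of 0xFFFFFF + 1 can occasionally produce the seven-digit string "#1000000". A correctly bounded sketch:

import random

def random_hex_color() -> str:
    # 0xFFFFFF is the largest valid 24-bit color; randint includes its upper bound.
    return f"#{random.randint(0, 0xFFFFFF):06X}"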
@@ -15,7 +15,7 @@ from plane.app.serializers import IssueLinkSerializer
|
||||
from plane.app.permissions import ProjectEntityPermission
|
||||
from plane.db.models import IssueLink
|
||||
from plane.bgtasks.issue_activities_task import issue_activity
|
||||
|
||||
from plane.utils.host import base_host
|
||||
|
||||
class IssueLinkViewSet(BaseViewSet):
|
||||
permission_classes = [ProjectEntityPermission]
|
||||
@@ -52,7 +52,7 @@ class IssueLinkViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
@@ -77,7 +77,7 @@ class IssueLinkViewSet(BaseViewSet):
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
@@ -98,7 +98,7 @@ class IssueLinkViewSet(BaseViewSet):
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
issue_link.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@@ -15,7 +15,7 @@ from plane.app.serializers import IssueReactionSerializer
|
||||
from plane.app.permissions import allow_permission, ROLE
|
||||
from plane.db.models import IssueReaction
|
||||
from plane.bgtasks.issue_activities_task import issue_activity
|
||||
|
||||
from plane.utils.host import base_host
|
||||
|
||||
class IssueReactionViewSet(BaseViewSet):
|
||||
serializer_class = IssueReactionSerializer
|
||||
@@ -53,7 +53,7 @@ class IssueReactionViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
@@ -78,7 +78,7 @@ class IssueReactionViewSet(BaseViewSet):
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
issue_reaction.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@@ -27,7 +27,7 @@ from plane.db.models import (
|
||||
)
|
||||
from plane.bgtasks.issue_activities_task import issue_activity
|
||||
from plane.utils.issue_relation_mapper import get_actual_relation
|
||||
|
||||
from plane.utils.host import base_host
|
||||
|
||||
class IssueRelationViewSet(BaseViewSet):
|
||||
serializer_class = IssueRelationSerializer
|
||||
@@ -253,7 +253,7 @@ class IssueRelationViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
|
||||
if relation_type in ["blocking", "start_after", "finish_after"]:
|
||||
@@ -268,27 +268,19 @@ class IssueRelationViewSet(BaseViewSet):
|
||||
)
|
||||
|
||||
def remove_relation(self, request, slug, project_id, issue_id):
|
||||
relation_type = request.data.get("relation_type", None)
|
||||
related_issue = request.data.get("related_issue", None)
|
||||
|
||||
-        if relation_type in ["blocking", "start_after", "finish_after"]:
-            issue_relation = IssueRelation.objects.get(
-                workspace__slug=slug,
-                project_id=project_id,
-                issue_id=related_issue,
-                related_issue_id=issue_id,
-            )
-        else:
-            issue_relation = IssueRelation.objects.get(
-                workspace__slug=slug,
-                project_id=project_id,
-                issue_id=issue_id,
-                related_issue_id=related_issue,
-            )
-        current_instance = json.dumps(
-            IssueRelationSerializer(issue_relation).data, cls=DjangoJSONEncoder
+        issue_relations = IssueRelation.objects.filter(
+            workspace__slug=slug,
+        ).filter(
+            Q(issue_id=related_issue, related_issue_id=issue_id)
+            | Q(issue_id=issue_id, related_issue_id=related_issue)
         )
-        issue_relation.delete()
+        issue_relations = issue_relations.first()
+        current_instance = json.dumps(
+            IssueRelationSerializer(issue_relations).data, cls=DjangoJSONEncoder
+        )
+        issue_relations.delete()
|
||||
issue_activity.delay(
|
||||
type="issue_relation.activity.deleted",
|
||||
requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
|
||||
@@ -298,6 +290,6 @@ class IssueRelationViewSet(BaseViewSet):
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
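The rewrite above works because a relation between two issues may be stored in either direction; the Q-object pair matches both orderings in one query instead of branching on relation_type. The same idiom in isolation:

from django.db.models import Q

def relation_between(queryset, a_id, b_id):
    # Matches (a -> b) rows as well as (b -> a) rows.
    return queryset.filter(
        Q(issue_id=a_id, related_issue_id=b_id)
        | Q(issue_id=b_id, related_issue_id=a_id)
    )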
|
||||
|
||||
@@ -22,7 +22,7 @@ from plane.db.models import Issue, IssueLink, FileAsset, CycleIssue
|
||||
from plane.bgtasks.issue_activities_task import issue_activity
|
||||
from plane.utils.timezone_converter import user_timezone_converter
|
||||
from collections import defaultdict
|
||||
|
||||
from plane.utils.host import base_host
|
||||
|
||||
class SubIssuesEndpoint(BaseAPIView):
|
||||
permission_classes = [ProjectEntityPermission]
|
||||
@@ -176,7 +176,7 @@ class SubIssuesEndpoint(BaseAPIView):
|
||||
current_instance=json.dumps({"parent": str(sub_issue_id)}),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
for sub_issue_id in sub_issue_ids
|
||||
]
|
||||
|
||||
apiserver/plane/app/views/issue/version.py (new file, 118 lines)
@@ -0,0 +1,118 @@
|
||||
# Third party imports
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
|
||||
# Module imports
|
||||
from plane.db.models import IssueVersion, IssueDescriptionVersion
|
||||
from ..base import BaseAPIView
|
||||
from plane.app.serializers import (
|
||||
IssueVersionDetailSerializer,
|
||||
IssueDescriptionVersionDetailSerializer,
|
||||
)
|
||||
from plane.app.permissions import allow_permission, ROLE
|
||||
from plane.utils.global_paginator import paginate
|
||||
from plane.utils.timezone_converter import user_timezone_converter
|
||||
|
||||
|
||||
class IssueVersionEndpoint(BaseAPIView):
|
||||
def process_paginated_result(self, fields, results, timezone):
|
||||
paginated_data = results.values(*fields)
|
||||
|
||||
datetime_fields = ["created_at", "updated_at"]
|
||||
paginated_data = user_timezone_converter(
|
||||
paginated_data, datetime_fields, timezone
|
||||
)
|
||||
|
||||
return paginated_data
|
||||
|
||||
@allow_permission(allowed_roles=[ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
|
||||
def get(self, request, slug, project_id, issue_id, pk=None):
|
||||
if pk:
|
||||
issue_version = IssueVersion.objects.get(
|
||||
workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
|
||||
)
|
||||
|
||||
serializer = IssueVersionDetailSerializer(issue_version)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
cursor = request.GET.get("cursor", None)
|
||||
|
||||
required_fields = [
|
||||
"id",
|
||||
"workspace",
|
||||
"project",
|
||||
"issue",
|
||||
"last_saved_at",
|
||||
"owned_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
]
|
||||
|
||||
issue_versions_queryset = IssueVersion.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id, issue_id=issue_id
|
||||
)
|
||||
|
||||
paginated_data = paginate(
|
||||
base_queryset=issue_versions_queryset,
|
||||
queryset=issue_versions_queryset,
|
||||
cursor=cursor,
|
||||
on_result=lambda results: self.process_paginated_result(
|
||||
required_fields, results, request.user.user_timezone
|
||||
),
|
||||
)
|
||||
|
||||
return Response(paginated_data, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
class IssueDescriptionVersionEndpoint(BaseAPIView):
|
||||
def process_paginated_result(self, fields, results, timezone):
|
||||
paginated_data = results.values(*fields)
|
||||
|
||||
datetime_fields = ["created_at", "updated_at"]
|
||||
paginated_data = user_timezone_converter(
|
||||
paginated_data, datetime_fields, timezone
|
||||
)
|
||||
|
||||
return paginated_data
|
||||
|
||||
@allow_permission(allowed_roles=[ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
|
||||
def get(self, request, slug, project_id, issue_id, pk=None):
|
||||
if pk:
|
||||
issue_description_version = IssueDescriptionVersion.objects.get(
|
||||
workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
|
||||
)
|
||||
|
||||
serializer = IssueDescriptionVersionDetailSerializer(
|
||||
issue_description_version
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
cursor = request.GET.get("cursor", None)
|
||||
|
||||
required_fields = [
|
||||
"id",
|
||||
"workspace",
|
||||
"project",
|
||||
"issue",
|
||||
"last_saved_at",
|
||||
"owned_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
]
|
||||
|
||||
issue_description_versions_queryset = IssueDescriptionVersion.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id, issue_id=issue_id
|
||||
)
|
||||
paginated_data = paginate(
|
||||
base_queryset=issue_description_versions_queryset,
|
||||
queryset=issue_description_versions_queryset,
|
||||
cursor=cursor,
|
||||
on_result=lambda results: self.process_paginated_result(
|
||||
required_fields, results, request.user.user_timezone
|
||||
),
|
||||
)
|
||||
return Response(paginated_data, status=status.HTTP_200_OK)
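Both version endpoints carry an identical process_paginated_result; a shared mixin (a refactoring sketch, not part of this diff) would remove the duplication:

class TimezoneConvertedResultMixin:
    datetime_fields = ["created_at", "updated_at"]

    def process_paginated_result(self, fields, results, timezone):
        # Project the queryset to dicts, then localize datetimes for the user,
        # exactly as both endpoints above do.
        paginated_data = results.values(*fields)
        return user_timezone_converter(paginated_data, self.datetime_fields, timezone)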
|
||||
@@ -54,13 +54,14 @@ from plane.db.models import (
|
||||
ModuleLink,
|
||||
ModuleUserProperties,
|
||||
Project,
|
||||
UserRecentVisit,
|
||||
)
|
||||
from plane.utils.analytics_plot import burndown_plot
|
||||
from plane.utils.timezone_converter import user_timezone_converter
|
||||
from plane.bgtasks.webhook_task import model_activity
|
||||
from .. import BaseAPIView, BaseViewSet
|
||||
from plane.bgtasks.recent_visited_task import recent_visited_task
|
||||
|
||||
from plane.utils.host import base_host
|
||||
|
||||
class ModuleViewSet(BaseViewSet):
|
||||
model = Module
|
||||
@@ -375,7 +376,7 @@ class ModuleViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
actor_id=request.user.id,
|
||||
slug=slug,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
datetime_fields = ["created_at", "updated_at"]
|
||||
module = user_timezone_converter(
|
||||
@@ -767,7 +768,7 @@ class ModuleViewSet(BaseViewSet):
|
||||
current_instance=current_instance,
|
||||
actor_id=request.user.id,
|
||||
slug=slug,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
|
||||
datetime_fields = ["created_at", "updated_at"]
|
||||
@@ -794,7 +795,7 @@ class ModuleViewSet(BaseViewSet):
|
||||
current_instance=json.dumps({"module_name": str(module.name)}),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
for issue in module_issues
|
||||
]
|
||||
@@ -808,6 +809,13 @@ class ModuleViewSet(BaseViewSet):
|
||||
entity_identifier=pk,
|
||||
project_id=project_id,
|
||||
).delete()
|
||||
# delete the module from recent visits
|
||||
UserRecentVisit.objects.filter(
|
||||
project_id=project_id,
|
||||
workspace__slug=slug,
|
||||
entity_identifier=pk,
|
||||
entity_name="module",
|
||||
).delete(soft=False)
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@ from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPagina
|
||||
|
||||
# Module imports
|
||||
from .. import BaseViewSet
|
||||
|
||||
from plane.utils.host import base_host
|
||||
|
||||
class ModuleIssueViewSet(BaseViewSet):
|
||||
serializer_class = ModuleIssueSerializer
|
||||
@@ -221,7 +221,7 @@ class ModuleIssueViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
for issue in issues
|
||||
]
|
||||
@@ -261,7 +261,7 @@ class ModuleIssueViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
for module in modules
|
||||
]
|
||||
@@ -280,11 +280,11 @@ class ModuleIssueViewSet(BaseViewSet):
|
||||
issue_id=str(issue_id),
|
||||
project_id=str(project_id),
|
||||
current_instance=json.dumps(
|
||||
{"module_name": module_issue.first().module.name}
|
||||
{"module_name": module_issue.first().module.name if (module_issue.first() and module_issue.first().module) else None}
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
module_issue.delete()
|
||||
|
||||
@@ -309,7 +309,7 @@ class ModuleIssueViewSet(BaseViewSet):
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
module_issue.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@@ -33,13 +33,14 @@ from plane.db.models import (
|
||||
ProjectMember,
|
||||
ProjectPage,
|
||||
Project,
|
||||
UserRecentVisit,
|
||||
)
|
||||
from plane.utils.error_codes import ERROR_CODES
|
||||
from ..base import BaseAPIView, BaseViewSet
|
||||
from plane.bgtasks.page_transaction_task import page_transaction
|
||||
from plane.bgtasks.page_version_task import page_version
|
||||
from plane.bgtasks.recent_visited_task import recent_visited_task
|
||||
|
||||
from plane.bgtasks.copy_s3_object import copy_s3_objects
|
||||
|
||||
def unarchive_archive_page_and_descendants(page_id, archived_at):
|
||||
# Your SQL query
|
||||
@@ -121,6 +122,8 @@ class PageViewSet(BaseViewSet):
|
||||
context={
|
||||
"project_id": project_id,
|
||||
"owned_by_id": request.user.id,
|
||||
"description": request.data.get("description", {}),
|
||||
"description_binary": request.data.get("description_binary", None),
|
||||
"description_html": request.data.get("description_html", "<p></p>"),
|
||||
},
|
||||
)
|
||||
@@ -385,6 +388,13 @@ class PageViewSet(BaseViewSet):
|
||||
entity_identifier=pk,
|
||||
entity_type="page",
|
||||
).delete()
|
||||
# Delete the page from recent visit
|
||||
UserRecentVisit.objects.filter(
|
||||
project_id=project_id,
|
||||
workspace__slug=slug,
|
||||
entity_identifier=pk,
|
||||
entity_name="page",
|
||||
).delete(soft=False)
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
@@ -553,3 +563,61 @@ class PagesDescriptionViewSet(BaseViewSet):
|
||||
return Response({"message": "Updated successfully"})
|
||||
else:
|
||||
return Response({"error": "No binary data provided"})
|
||||
|
||||
|
||||
class PageDuplicateEndpoint(BaseAPIView):
|
||||
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
|
||||
def post(self, request, slug, project_id, page_id):
|
||||
        page = Page.objects.filter(
            pk=page_id, workspace__slug=slug, projects__id=project_id
        ).first()
        if page is None:  # guard: filter().first() returns None when absent
            return Response({"error": "Page not found"}, status=status.HTTP_404_NOT_FOUND)
|
||||
|
||||
# get all the project ids where page is present
|
||||
project_ids = ProjectPage.objects.filter(page_id=page_id).values_list(
|
||||
"project_id", flat=True
|
||||
)
|
||||
|
||||
page.pk = None
|
||||
page.name = f"{page.name} (Copy)"
|
||||
page.description_binary = None
|
||||
page.owned_by = request.user
|
||||
page.created_by = request.user
|
||||
page.updated_by = request.user
|
||||
page.save()
|
||||
|
||||
for project_id in project_ids:
|
||||
ProjectPage.objects.create(
|
||||
workspace_id=page.workspace_id,
|
||||
project_id=project_id,
|
||||
page_id=page.id,
|
||||
created_by_id=page.created_by_id,
|
||||
updated_by_id=page.updated_by_id,
|
||||
)
|
||||
|
||||
page_transaction.delay(
|
||||
{"description_html": page.description_html}, None, page.id
|
||||
)
|
||||
|
||||
# Copy the s3 objects uploaded in the page
|
||||
copy_s3_objects.delay(
|
||||
entity_name="PAGE",
|
||||
entity_identifier=page.id,
|
||||
project_id=project_id,
|
||||
slug=slug,
|
||||
user_id=request.user.id,
|
||||
)
|
||||
|
||||
page = (
|
||||
Page.objects.filter(pk=page.id)
|
||||
.annotate(
|
||||
project_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"projects__id", distinct=True, filter=~Q(projects__id=True)
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
)
|
||||
)
|
||||
.first()
|
||||
)
|
||||
serializer = PageDetailSerializer(page)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
|
||||
@@ -6,7 +6,7 @@ import json
|
||||
|
||||
# Django imports
|
||||
from django.db import IntegrityError
|
||||
-from django.db.models import Exists, F, Func, OuterRef, Prefetch, Q, Subquery
+from django.db.models import Exists, F, OuterRef, Prefetch, Q, Subquery
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
|
||||
# Third Party imports
|
||||
@@ -25,12 +25,9 @@ from plane.app.serializers import (
|
||||
from plane.app.permissions import ProjectMemberPermission, allow_permission, ROLE
|
||||
from plane.db.models import (
|
||||
UserFavorite,
|
||||
Cycle,
|
||||
Intake,
|
||||
DeployBoard,
|
||||
IssueUserProperty,
|
||||
Issue,
|
||||
Module,
|
||||
Project,
|
||||
ProjectIdentifier,
|
||||
ProjectMember,
|
||||
@@ -39,10 +36,10 @@ from plane.db.models import (
|
||||
WorkspaceMember,
|
||||
)
|
||||
from plane.utils.cache import cache_response
|
||||
-from plane.bgtasks.webhook_task import model_activity
+from plane.bgtasks.webhook_task import model_activity, webhook_activity
|
||||
from plane.bgtasks.recent_visited_task import recent_visited_task
|
||||
from plane.utils.exception_logger import log_exception
|
||||
|
||||
from plane.utils.host import base_host
|
||||
|
||||
class ProjectViewSet(BaseViewSet):
|
||||
serializer_class = ProjectListSerializer
|
||||
@@ -73,36 +70,6 @@ class ProjectViewSet(BaseViewSet):
|
||||
)
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
is_member=Exists(
|
||||
ProjectMember.objects.filter(
|
||||
member=self.request.user,
|
||||
project_id=OuterRef("pk"),
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
is_active=True,
|
||||
)
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
total_members=ProjectMember.objects.filter(
|
||||
project_id=OuterRef("id"), member__is_bot=False, is_active=True
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
total_cycles=Cycle.objects.filter(project_id=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
total_modules=Module.objects.filter(project_id=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
member_role=ProjectMember.objects.filter(
|
||||
project_id=OuterRef("pk"),
|
||||
@@ -133,7 +100,7 @@ class ProjectViewSet(BaseViewSet):
|
||||
@allow_permission(
|
||||
allowed_roles=[ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST], level="WORKSPACE"
|
||||
)
|
||||
-    def list(self, request, slug):
+    def list_detail(self, request, slug):
|
||||
fields = [field for field in request.GET.get("fields", "").split(",") if field]
|
||||
projects = self.get_queryset().order_by("sort_order", "name")
|
||||
if WorkspaceMember.objects.filter(
|
||||
@@ -170,6 +137,75 @@ class ProjectViewSet(BaseViewSet):
|
||||
).data
|
||||
return Response(projects, status=status.HTTP_200_OK)
|
||||
|
||||
@allow_permission(
|
||||
allowed_roles=[ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST], level="WORKSPACE"
|
||||
)
|
||||
def list(self, request, slug):
|
||||
sort_order = ProjectMember.objects.filter(
|
||||
member=self.request.user,
|
||||
project_id=OuterRef("pk"),
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
is_active=True,
|
||||
).values("sort_order")
|
||||
|
||||
projects = (
|
||||
Project.objects.filter(workspace__slug=self.kwargs.get("slug"))
|
||||
.select_related(
|
||||
"workspace", "workspace__owner", "default_assignee", "project_lead"
|
||||
)
|
||||
.annotate(
|
||||
member_role=ProjectMember.objects.filter(
|
||||
project_id=OuterRef("pk"),
|
||||
member_id=self.request.user.id,
|
||||
is_active=True,
|
||||
).values("role")
|
||||
)
|
||||
.annotate(inbox_view=F("intake_view"))
|
||||
.annotate(sort_order=Subquery(sort_order))
|
||||
.distinct()
|
||||
).values(
|
||||
"id",
|
||||
"name",
|
||||
"identifier",
|
||||
"sort_order",
|
||||
"logo_props",
|
||||
"member_role",
|
||||
"archived_at",
|
||||
"workspace",
|
||||
"cycle_view",
|
||||
"issue_views_view",
|
||||
"module_view",
|
||||
"page_view",
|
||||
"inbox_view",
|
||||
"guest_view_all_features",
|
||||
"project_lead",
|
||||
"network",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
)
|
||||
|
||||
if WorkspaceMember.objects.filter(
|
||||
member=request.user, workspace__slug=slug, is_active=True, role=5
|
||||
).exists():
|
||||
projects = projects.filter(
|
||||
project_projectmember__member=self.request.user,
|
||||
project_projectmember__is_active=True,
|
||||
)
|
||||
|
||||
if WorkspaceMember.objects.filter(
|
||||
member=request.user, workspace__slug=slug, is_active=True, role=15
|
||||
).exists():
|
||||
projects = projects.filter(
|
||||
Q(
|
||||
project_projectmember__member=self.request.user,
|
||||
project_projectmember__is_active=True,
|
||||
)
|
||||
| Q(network=2)
|
||||
)
|
||||
return Response(projects, status=status.HTTP_200_OK)
|
||||
|
||||
@allow_permission(
|
||||
allowed_roles=[ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST], level="WORKSPACE"
|
||||
)
|
||||
@@ -182,58 +218,6 @@ class ProjectViewSet(BaseViewSet):
|
||||
)
|
||||
.filter(archived_at__isnull=True)
|
||||
.filter(pk=pk)
|
||||
.annotate(
|
||||
total_issues=Issue.issue_objects.filter(
|
||||
project_id=self.kwargs.get("pk")
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
sub_issues=Issue.issue_objects.filter(
|
||||
project_id=self.kwargs.get("pk"), parent__isnull=False
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
archived_issues=Issue.objects.filter(
|
||||
project_id=self.kwargs.get("pk"), archived_at__isnull=False
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
archived_sub_issues=Issue.objects.filter(
|
||||
project_id=self.kwargs.get("pk"),
|
||||
archived_at__isnull=False,
|
||||
parent__isnull=False,
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
draft_issues=Issue.objects.filter(
|
||||
project_id=self.kwargs.get("pk"), is_draft=True
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
draft_sub_issues=Issue.objects.filter(
|
||||
project_id=self.kwargs.get("pk"),
|
||||
is_draft=True,
|
||||
parent__isnull=False,
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
).first()
|
||||
|
||||
if project is None:
|
||||
@@ -347,7 +331,7 @@ class ProjectViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
actor_id=request.user.id,
|
||||
slug=slug,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
|
||||
serializer = ProjectListSerializer(project)
|
||||
@@ -416,16 +400,6 @@ class ProjectViewSet(BaseViewSet):
|
||||
is_default=True,
|
||||
)
|
||||
|
||||
# Create the triage state in Backlog group
|
||||
State.objects.get_or_create(
|
||||
name="Triage",
|
||||
group="triage",
|
||||
description="Default state for managing all Intake Issues",
|
||||
project_id=pk,
|
||||
color="#ff7700",
|
||||
is_triage=True,
|
||||
)
|
||||
|
||||
project = self.get_queryset().filter(pk=serializer.data["id"]).first()
|
||||
|
||||
model_activity.delay(
|
||||
@@ -435,7 +409,7 @@ class ProjectViewSet(BaseViewSet):
|
||||
current_instance=current_instance,
|
||||
actor_id=request.user.id,
|
||||
slug=slug,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
serializer = ProjectListSerializer(project)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
@@ -472,7 +446,19 @@ class ProjectViewSet(BaseViewSet):
|
||||
):
|
||||
project = Project.objects.get(pk=pk)
|
||||
project.delete()
|
||||
|
||||
webhook_activity.delay(
|
||||
event="project",
|
||||
verb="deleted",
|
||||
field=None,
|
||||
old_value=None,
|
||||
new_value=None,
|
||||
actor_id=request.user.id,
|
||||
slug=slug,
|
||||
current_site=base_host(request=request, is_app=True),
|
||||
event_id=project.id,
|
||||
old_identifier=None,
|
||||
new_identifier=None,
|
||||
)
|
||||
# Delete the project members
|
||||
DeployBoard.objects.filter(project_id=pk, workspace__slug=slug).delete()
|
||||
|
||||
|
||||
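The hunks above repeatedly swap origin=request.META.get("HTTP_ORIGIN") for origin=base_host(request=request, is_app=True). A minimal sketch of the idea behind such a helper, assuming its internals (plane's real implementation lives in plane/utils/host.py and is not shown in this diff):

# Illustrative sketch only; APP_BASE_URL and WEB_URL are assumed setting
# names, not necessarily what plane uses.
import os

def base_host_sketch(request, is_app=False):
    # Prefer an explicitly configured base URL: the client-sent Origin
    # header can be absent (non-browser clients) or spoofed.
    configured = os.environ.get("APP_BASE_URL" if is_app else "WEB_URL")
    if configured:
        return configured
    # Fall back to the scheme and host of the request itself.
    return f"{request.scheme}://{request.get_host()}"
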
@@ -16,18 +16,18 @@ from rest_framework.permissions import AllowAny
# Module imports
from .base import BaseViewSet, BaseAPIView
from plane.app.serializers import ProjectMemberInviteSerializer

from plane.app.permissions import allow_permission, ROLE

from plane.db.models import (
    ProjectMember,
    Workspace,
    ProjectMemberInvite,
    User,
    WorkspaceMember,
    Project,
    IssueUserProperty,
)

from plane.db.models.project import ProjectNetwork
from plane.utils.host import base_host

class ProjectInvitationsViewset(BaseViewSet):
    serializer_class = ProjectMemberInviteSerializer
@@ -99,7 +99,7 @@ class ProjectInvitationsViewset(BaseViewSet):
        project_invitations = ProjectMemberInvite.objects.bulk_create(
            project_invitations, batch_size=10, ignore_conflicts=True
        )
        current_site = request.META.get("HTTP_ORIGIN")
        current_site = base_host(request=request, is_app=True)

        # Send invitations
        for invitation in project_invitations:
@@ -128,6 +128,7 @@ class UserProjectInvitationsViewset(BaseViewSet):
            .select_related("workspace", "workspace__owner", "project")
        )

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE")
    def create(self, request, slug):
        project_ids = request.data.get("project_ids", [])

@@ -136,11 +137,20 @@ class UserProjectInvitationsViewset(BaseViewSet):
            member=request.user, workspace__slug=slug, is_active=True
        )

        if workspace_member.role not in [ROLE.ADMIN.value, ROLE.MEMBER.value]:
            return Response(
                {"error": "You do not have permission to join the project"},
                status=status.HTTP_403_FORBIDDEN,
            )
        # Get all the projects
        projects = Project.objects.filter(
            id__in=project_ids, workspace__slug=slug
        ).only("id", "network")
        # Check if user has permission to join each project
        for project in projects:
            if (
                project.network == ProjectNetwork.SECRET.value
                and workspace_member.role != ROLE.ADMIN.value
            ):
                return Response(
                    {"error": "Only workspace admins can join private project"},
                    status=status.HTTP_403_FORBIDDEN,
                )

        workspace_role = workspace_member.role
        workspace = workspace_member.workspace

@@ -10,11 +10,7 @@ from plane.app.serializers import (
    ProjectMemberRoleSerializer,
)

from plane.app.permissions import (
    ProjectMemberPermission,
    ProjectLitePermission,
    WorkspaceUserPermission,
)
from plane.app.permissions import WorkspaceUserPermission

from plane.db.models import Project, ProjectMember, IssueUserProperty, WorkspaceMember
from plane.bgtasks.project_add_user_email_task import project_add_user_email
@@ -26,14 +22,6 @@ class ProjectMemberViewSet(BaseViewSet):
    serializer_class = ProjectMemberAdminSerializer
    model = ProjectMember

    def get_permissions(self):
        if self.action == "leave":
            self.permission_classes = [ProjectLitePermission]
        else:
            self.permission_classes = [ProjectMemberPermission]

        return super(ProjectMemberViewSet, self).get_permissions()

    search_fields = ["member__display_name", "member__first_name"]

    def get_queryset(self):
@@ -187,12 +175,20 @@ class ProjectMemberViewSet(BaseViewSet):
        )
        return Response(serializer.data, status=status.HTTP_200_OK)

    @allow_permission([ROLE.ADMIN])
    @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
    def partial_update(self, request, slug, project_id, pk):
        project_member = ProjectMember.objects.get(
            pk=pk, workspace__slug=slug, project_id=project_id, is_active=True
        )
        if request.user.id == project_member.member_id:

        # Fetch the workspace role of the project member
        workspace_role = WorkspaceMember.objects.get(
            workspace__slug=slug, member=project_member.member, is_active=True
        ).role
        is_workspace_admin = workspace_role == ROLE.ADMIN.value

        # Check if the user is not editing their own role if they are not an admin
        if request.user.id == project_member.member_id and not is_workspace_admin:
            return Response(
                {"error": "You cannot update your own role"},
                status=status.HTTP_400_BAD_REQUEST,
@@ -205,9 +201,6 @@ class ProjectMemberViewSet(BaseViewSet):
            is_active=True,
        )

        workspace_role = WorkspaceMember.objects.get(
            workspace__slug=slug, member=project_member.member, is_active=True
        ).role
        if workspace_role in [5] and int(
            request.data.get("role", project_member.role)
        ) in [15, 20]:
@@ -222,6 +215,7 @@ class ProjectMemberViewSet(BaseViewSet):
            "role" in request.data
            and int(request.data.get("role", project_member.role))
            > requested_project_member.role
            and not is_workspace_admin
        ):
            return Response(
                {"error": "You cannot update a role that is higher than your own role"},

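The partial_update hunk above opens role edits to members and guests but keeps two guards: you cannot change your own role unless you are a workspace admin, and you cannot grant a role above your own. A condensed sketch of the second guard, using the integer role values visible elsewhere in this diff (20 admin, 15 member, 5 guest):

# Hedged sketch of the role-hierarchy guard; plain ints stand in for ROLE.
def can_set_role(own_role, new_role, is_workspace_admin):
    # Workspace admins bypass the hierarchy check entirely.
    if is_workspace_admin:
        return True
    # Everyone else may only hand out roles at or below their own.
    return new_role <= own_role

assert can_set_role(15, 20, is_workspace_admin=False) is False
assert can_set_role(15, 15, is_workspace_admin=False) is True
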
@@ -34,6 +34,7 @@ from plane.db.models import (
    IssueView,
    ProjectMember,
    ProjectPage,
    WorkspaceMember,
)


@@ -252,214 +253,463 @@ class GlobalSearchEndpoint(BaseAPIView):


class SearchEndpoint(BaseAPIView):
    def get(self, request, slug, project_id):
    def get(self, request, slug):
        query = request.query_params.get("query", False)
        query_types = request.query_params.get("query_type", "user_mention").split(",")
        query_types = [qt.strip() for qt in query_types]
        count = int(request.query_params.get("count", 5))
        project_id = request.query_params.get("project_id", None)
        issue_id = request.query_params.get("issue_id", None)

        response_data = {}

        for query_type in query_types:
            if query_type == "user_mention":
                fields = [
                    "member__first_name",
                    "member__last_name",
                    "member__display_name",
                ]
                q = Q()

                if query:
                    for field in fields:
                        q |= Q(**{f"{field}__icontains": query})
                users = (
                    ProjectMember.objects.filter(
                        q, is_active=True, project_id=project_id, workspace__slug=slug
                    )
                    .annotate(
                        member__avatar_url=Case(
                            When(
                                member__avatar_asset__isnull=False,
                                then=Concat(
                                    Value("/api/assets/v2/static/"),
                                    "member__avatar_asset",
                                    Value("/"),
                                ),
                            ),
                            When(
                                member__avatar_asset__isnull=True, then="member__avatar"
                            ),
                            default=Value(None),
                            output_field=models.CharField(),
                        )
                    )
                    .order_by("-created_at")
                    .values("member__avatar_url", "member__display_name", "member__id")[
                        :count
        if project_id:
            for query_type in query_types:
                if query_type == "user_mention":
                    fields = [
                        "member__first_name",
                        "member__last_name",
                        "member__display_name",
                    ]
                )
                response_data["user_mention"] = list(users)
                    q = Q()

            elif query_type == "project":
                fields = ["name", "identifier"]
                q = Q()

                if query:
                    for field in fields:
                        q |= Q(**{f"{field}__icontains": query})
                projects = (
                    Project.objects.filter(
                        q,
                        Q(project_projectmember__member=self.request.user)
                        | Q(network=2),
                        workspace__slug=slug,
                    )
                    .order_by("-created_at")
                    .distinct()
                    .values(
                        "name", "id", "identifier", "logo_props", "workspace__slug"
                    )[:count]
                )
                response_data["project"] = list(projects)

            elif query_type == "issue":
                fields = ["name", "sequence_id", "project__identifier"]
                q = Q()

                if query:
                    for field in fields:
                        if field == "sequence_id":
                            sequences = re.findall(r"\b\d+\b", query)
                            for sequence_id in sequences:
                                q |= Q(**{"sequence_id": sequence_id})
                        else:
                            if query:
                                for field in fields:
                                    q |= Q(**{f"{field}__icontains": query})

                issues = (
                    Issue.issue_objects.filter(
                        q,
                        project__project_projectmember__member=self.request.user,
                        project__project_projectmember__is_active=True,
                        workspace__slug=slug,
                        project_id=project_id,
                    )
                    .order_by("-created_at")
                    .distinct()
                    .values(
                        "name",
                        "id",
                        "sequence_id",
                        "project__identifier",
                        "project_id",
                        "priority",
                        "state_id",
                        "type_id",
                    )[:count]
                )
                response_data["issue"] = list(issues)

            elif query_type == "cycle":
                fields = ["name"]
                q = Q()

                if query:
                    for field in fields:
                        q |= Q(**{f"{field}__icontains": query})

                cycles = (
                    Cycle.objects.filter(
                        q,
                        project__project_projectmember__member=self.request.user,
                        project__project_projectmember__is_active=True,
                        workspace__slug=slug,
                    )
                    .annotate(
                        status=Case(
                            When(
                                Q(start_date__lte=timezone.now())
                                & Q(end_date__gte=timezone.now()),
                                then=Value("CURRENT"),
                            ),
                            When(start_date__gt=timezone.now(), then=Value("UPCOMING")),
                            When(end_date__lt=timezone.now(), then=Value("COMPLETED")),
                            When(
                                Q(start_date__isnull=True) & Q(end_date__isnull=True),
                                then=Value("DRAFT"),
                            ),
                            default=Value("DRAFT"),
                            output_field=CharField(),
                    users = (
                        ProjectMember.objects.filter(
                            q,
                            is_active=True,
                            workspace__slug=slug,
                            member__is_bot=False,
                            project_id=project_id,
                        )
                        .annotate(
                            member__avatar_url=Case(
                                When(
                                    member__avatar_asset__isnull=False,
                                    then=Concat(
                                        Value("/api/assets/v2/static/"),
                                        "member__avatar_asset",
                                        Value("/"),
                                    ),
                                ),
                                When(
                                    member__avatar_asset__isnull=True,
                                    then="member__avatar",
                                ),
                                default=Value(None),
                                output_field=CharField(),
                            )
                        )
                        .order_by("-created_at")
                    )
                    .order_by("-created_at")
                    .distinct()
                    .values(
                        "name",
                        "id",
                        "project_id",
                        "project__identifier",
                        "status",
                        "workspace__slug",
                    )[:count]
                )
                response_data["cycle"] = list(cycles)

            elif query_type == "module":
                fields = ["name"]
                q = Q()
                    if issue_id:
                        issue_created_by = (
                            Issue.objects.filter(id=issue_id)
                            .values_list("created_by_id", flat=True)
                            .first()
                        )
                        users = (
                            users.filter(Q(role__gt=10) | Q(member_id=issue_created_by))
                            .distinct()
                            .values(
                                "member__avatar_url",
                                "member__display_name",
                                "member__id",
                            )
                        )
                    else:
                        users = (
                            users.filter(Q(role__gt=10))
                            .distinct()
                            .values(
                                "member__avatar_url",
                                "member__display_name",
                                "member__id",
                            )
                        )

                if query:
                    for field in fields:
                        q |= Q(**{f"{field}__icontains": query})
                    response_data["user_mention"] = list(users[:count])

                modules = (
                    Module.objects.filter(
                        q,
                        project__project_projectmember__member=self.request.user,
                        project__project_projectmember__is_active=True,
                        workspace__slug=slug,
                elif query_type == "project":
                    fields = ["name", "identifier"]
                    q = Q()

                    if query:
                        for field in fields:
                            q |= Q(**{f"{field}__icontains": query})
                    projects = (
                        Project.objects.filter(
                            q,
                            Q(project_projectmember__member=self.request.user)
                            | Q(network=2),
                            workspace__slug=slug,
                        )
                        .order_by("-created_at")
                        .distinct()
                        .values(
                            "name", "id", "identifier", "logo_props", "workspace__slug"
                        )[:count]
                    )
                    .order_by("-created_at")
                    .distinct()
                    .values(
                        "name",
                        "id",
                        "project_id",
                        "project__identifier",
                        "status",
                        "workspace__slug",
                    )[:count]
                )
                response_data["module"] = list(modules)
                    response_data["project"] = list(projects)

            elif query_type == "page":
                fields = ["name"]
                q = Q()
                elif query_type == "issue":
                    fields = ["name", "sequence_id", "project__identifier"]
                    q = Q()

                if query:
                    for field in fields:
                        q |= Q(**{f"{field}__icontains": query})
                    if query:
                        for field in fields:
                            if field == "sequence_id":
                                sequences = re.findall(r"\b\d+\b", query)
                                for sequence_id in sequences:
                                    q |= Q(**{"sequence_id": sequence_id})
                            else:
                                q |= Q(**{f"{field}__icontains": query})

                pages = (
                    Page.objects.filter(
                        q,
                        projects__project_projectmember__member=self.request.user,
                        projects__project_projectmember__is_active=True,
                        projects__id=project_id,
                        workspace__slug=slug,
                        access=0,
                    issues = (
                        Issue.issue_objects.filter(
                            q,
                            project__project_projectmember__member=self.request.user,
                            project__project_projectmember__is_active=True,
                            workspace__slug=slug,
                            project_id=project_id,
                        )
                        .order_by("-created_at")
                        .distinct()
                        .values(
                            "name",
                            "id",
                            "sequence_id",
                            "project__identifier",
                            "project_id",
                            "priority",
                            "state_id",
                            "type_id",
                        )[:count]
                    )
                    .order_by("-created_at")
                    .distinct()
                    .values(
                        "name", "id", "logo_props", "projects__id", "workspace__slug"
                    )[:count]
                )
                response_data["page"] = list(pages)
                    response_data["issue"] = list(issues)

            else:
                return Response(
                    {"error": f"Invalid query type: {query_type}"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
                elif query_type == "cycle":
                    fields = ["name"]
                    q = Q()

        return Response(response_data, status=status.HTTP_200_OK)
                    if query:
                        for field in fields:
                            q |= Q(**{f"{field}__icontains": query})

                    cycles = (
                        Cycle.objects.filter(
                            q,
                            project__project_projectmember__member=self.request.user,
                            project__project_projectmember__is_active=True,
                            workspace__slug=slug,
                            project_id=project_id,
                        )
                        .annotate(
                            status=Case(
                                When(
                                    Q(start_date__lte=timezone.now())
                                    & Q(end_date__gte=timezone.now()),
                                    then=Value("CURRENT"),
                                ),
                                When(
                                    start_date__gt=timezone.now(),
                                    then=Value("UPCOMING"),
                                ),
                                When(
                                    end_date__lt=timezone.now(), then=Value("COMPLETED")
                                ),
                                When(
                                    Q(start_date__isnull=True)
                                    & Q(end_date__isnull=True),
                                    then=Value("DRAFT"),
                                ),
                                default=Value("DRAFT"),
                                output_field=CharField(),
                            )
                        )
                        .order_by("-created_at")
                        .distinct()
                        .values(
                            "name",
                            "id",
                            "project_id",
                            "project__identifier",
                            "status",
                            "workspace__slug",
                        )[:count]
                    )
                    response_data["cycle"] = list(cycles)

                elif query_type == "module":
                    fields = ["name"]
                    q = Q()

                    if query:
                        for field in fields:
                            q |= Q(**{f"{field}__icontains": query})

                    modules = (
                        Module.objects.filter(
                            q,
                            project__project_projectmember__member=self.request.user,
                            project__project_projectmember__is_active=True,
                            workspace__slug=slug,
                            project_id=project_id,
                        )
                        .order_by("-created_at")
                        .distinct()
                        .values(
                            "name",
                            "id",
                            "project_id",
                            "project__identifier",
                            "status",
                            "workspace__slug",
                        )[:count]
                    )
                    response_data["module"] = list(modules)

                elif query_type == "page":
                    fields = ["name"]
                    q = Q()

                    if query:
                        for field in fields:
                            q |= Q(**{f"{field}__icontains": query})

                    pages = (
                        Page.objects.filter(
                            q,
                            projects__project_projectmember__member=self.request.user,
                            projects__project_projectmember__is_active=True,
                            projects__id=project_id,
                            workspace__slug=slug,
                            access=0,
                        )
                        .order_by("-created_at")
                        .distinct()
                        .values(
                            "name",
                            "id",
                            "logo_props",
                            "projects__id",
                            "workspace__slug",
                        )[:count]
                    )
                    response_data["page"] = list(pages)
            return Response(response_data, status=status.HTTP_200_OK)

        else:
            for query_type in query_types:
                if query_type == "user_mention":
                    fields = [
                        "member__first_name",
                        "member__last_name",
                        "member__display_name",
                    ]
                    q = Q()

                    if query:
                        for field in fields:
                            q |= Q(**{f"{field}__icontains": query})
                    users = (
                        WorkspaceMember.objects.filter(
                            q,
                            is_active=True,
                            workspace__slug=slug,
                            member__is_bot=False,
                        )
                        .annotate(
                            member__avatar_url=Case(
                                When(
                                    member__avatar_asset__isnull=False,
                                    then=Concat(
                                        Value("/api/assets/v2/static/"),
                                        "member__avatar_asset",
                                        Value("/"),
                                    ),
                                ),
                                When(
                                    member__avatar_asset__isnull=True,
                                    then="member__avatar",
                                ),
                                default=Value(None),
                                output_field=models.CharField(),
                            )
                        )
                        .order_by("-created_at")
                        .values(
                            "member__avatar_url", "member__display_name", "member__id"
                        )[:count]
                    )
                    response_data["user_mention"] = list(users)

                elif query_type == "project":
                    fields = ["name", "identifier"]
                    q = Q()

                    if query:
                        for field in fields:
                            q |= Q(**{f"{field}__icontains": query})
                    projects = (
                        Project.objects.filter(
                            q,
                            Q(project_projectmember__member=self.request.user)
                            | Q(network=2),
                            workspace__slug=slug,
                        )
                        .order_by("-created_at")
                        .distinct()
                        .values(
                            "name", "id", "identifier", "logo_props", "workspace__slug"
                        )[:count]
                    )
                    response_data["project"] = list(projects)

                elif query_type == "issue":
                    fields = ["name", "sequence_id", "project__identifier"]
                    q = Q()

                    if query:
                        for field in fields:
                            if field == "sequence_id":
                                sequences = re.findall(r"\b\d+\b", query)
                                for sequence_id in sequences:
                                    q |= Q(**{"sequence_id": sequence_id})
                            else:
                                q |= Q(**{f"{field}__icontains": query})

                    issues = (
                        Issue.issue_objects.filter(
                            q,
                            project__project_projectmember__member=self.request.user,
                            project__project_projectmember__is_active=True,
                            workspace__slug=slug,
                        )
                        .order_by("-created_at")
                        .distinct()
                        .values(
                            "name",
                            "id",
                            "sequence_id",
                            "project__identifier",
                            "project_id",
                            "priority",
                            "state_id",
                            "type_id",
                        )[:count]
                    )
                    response_data["issue"] = list(issues)

                elif query_type == "cycle":
                    fields = ["name"]
                    q = Q()

                    if query:
                        for field in fields:
                            q |= Q(**{f"{field}__icontains": query})

                    cycles = (
                        Cycle.objects.filter(
                            q,
                            project__project_projectmember__member=self.request.user,
                            project__project_projectmember__is_active=True,
                            workspace__slug=slug,
                        )
                        .annotate(
                            status=Case(
                                When(
                                    Q(start_date__lte=timezone.now())
                                    & Q(end_date__gte=timezone.now()),
                                    then=Value("CURRENT"),
                                ),
                                When(
                                    start_date__gt=timezone.now(),
                                    then=Value("UPCOMING"),
                                ),
                                When(
                                    end_date__lt=timezone.now(), then=Value("COMPLETED")
                                ),
                                When(
                                    Q(start_date__isnull=True)
                                    & Q(end_date__isnull=True),
                                    then=Value("DRAFT"),
                                ),
                                default=Value("DRAFT"),
                                output_field=CharField(),
                            )
                        )
                        .order_by("-created_at")
                        .distinct()
                        .values(
                            "name",
                            "id",
                            "project_id",
                            "project__identifier",
                            "status",
                            "workspace__slug",
                        )[:count]
                    )
                    response_data["cycle"] = list(cycles)

                elif query_type == "module":
                    fields = ["name"]
                    q = Q()

                    if query:
                        for field in fields:
                            q |= Q(**{f"{field}__icontains": query})

                    modules = (
                        Module.objects.filter(
                            q,
                            project__project_projectmember__member=self.request.user,
                            project__project_projectmember__is_active=True,
                            workspace__slug=slug,
                        )
                        .order_by("-created_at")
                        .distinct()
                        .values(
                            "name",
                            "id",
                            "project_id",
                            "project__identifier",
                            "status",
                            "workspace__slug",
                        )[:count]
                    )
                    response_data["module"] = list(modules)

                elif query_type == "page":
                    fields = ["name"]
                    q = Q()

                    if query:
                        for field in fields:
                            q |= Q(**{f"{field}__icontains": query})

                    pages = (
                        Page.objects.filter(
                            q,
                            projects__project_projectmember__member=self.request.user,
                            projects__project_projectmember__is_active=True,
                            workspace__slug=slug,
                            access=0,
                            is_global=True,
                        )
                        .order_by("-created_at")
                        .distinct()
                        .values(
                            "name",
                            "id",
                            "logo_props",
                            "projects__id",
                            "workspace__slug",
                        )[:count]
                    )
                    response_data["page"] = list(pages)
            return Response(response_data, status=status.HTTP_200_OK)

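Both branches of the rewritten SearchEndpoint above repeat one pattern: OR-ing case-insensitive lookups across candidate fields into a single Q object. The pattern in isolation:

# The Q-building idiom repeated throughout the hunks above.
from django.db.models import Q

def build_icontains_query(fields, query):
    q = Q()
    if query:
        for field in fields:
            # OR each lookup together: name__icontains, identifier__icontains, ...
            q |= Q(**{f"{field}__icontains": query})
    return q
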
@@ -1,5 +1,3 @@
# Python imports

# Django imports
from django.db.models import Q

@@ -9,7 +7,7 @@ from rest_framework.response import Response

# Module imports
from .base import BaseAPIView
from plane.db.models import Issue, ProjectMember
from plane.db.models import Issue, ProjectMember, IssueRelation
from plane.utils.issue_search import search_issues


@@ -47,17 +45,18 @@ class IssueSearchEndpoint(BaseAPIView):
            )
        if issue_relation == "true" and issue_id:
            issue = Issue.issue_objects.filter(pk=issue_id).first()
            related_issue_ids = IssueRelation.objects.filter(
                Q(related_issue=issue) | Q(issue=issue)
            ).values_list(
                "issue_id", "related_issue_id"
            ).distinct()

            related_issue_ids = [item for sublist in related_issue_ids for item in sublist]

            if issue:
                issues = issues.filter(
                    ~Q(pk=issue_id),
                    ~(
                        Q(issue_related__issue=issue)
                        & Q(issue_related__deleted_at__isnull=True)
                    ),
                    ~(
                        Q(issue_relation__related_issue=issue)
                        & Q(issue_relation__deleted_at__isnull=True)
                    ),
                    ~Q(pk__in=related_issue_ids),
                )
        if sub_issue == "true" and issue_id:
            issue = Issue.issue_objects.filter(pk=issue_id).first()

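The hunk above pulls (issue_id, related_issue_id) pairs out of IssueRelation and flattens them into one exclusion list. The flattening idiom on plain data:

# Same flattening idiom as the hunk above, shown on stand-in tuples.
pairs = [(1, 2), (3, 4)]  # stand-ins for (issue_id, related_issue_id) rows
related_issue_ids = [item for sublist in pairs for item in sublist]
assert related_issue_ids == [1, 2, 3, 4]
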
@@ -1,6 +1,9 @@
# Python imports
from itertools import groupby

# Django imports
from django.db.utils import IntegrityError

# Third party imports
from rest_framework.response import Response
from rest_framework import status
@@ -37,11 +40,36 @@ class StateViewSet(BaseViewSet):
    @invalidate_cache(path="workspaces/:slug/states/", url_params=True, user=False)
    @allow_permission([ROLE.ADMIN])
    def create(self, request, slug, project_id):
        serializer = StateSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save(project_id=project_id)
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        try:
            serializer = StateSerializer(data=request.data)
            if serializer.is_valid():
                serializer.save(project_id=project_id)
                return Response(serializer.data, status=status.HTTP_200_OK)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        except IntegrityError as e:
            if "already exists" in str(e):
                return Response(
                    {"name": "The state name is already taken"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
    def partial_update(self, request, slug, project_id, pk):
        try:
            state = State.objects.get(
                pk=pk, project_id=project_id, workspace__slug=slug
            )
            serializer = StateSerializer(state, data=request.data, partial=True)
            if serializer.is_valid():
                serializer.save()
                return Response(serializer.data, status=status.HTTP_200_OK)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        except IntegrityError as e:
            if "already exists" in str(e):
                return Response(
                    {"name": "The state name is already taken"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
    def list(self, request, slug, project_id):

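The state hunks above wrap create and partial_update in try/except so a database uniqueness violation on the state name comes back as a 400 rather than a 500. Reduced to its core shape (note the diff's handler implicitly returns None when the IntegrityError is unrelated to a duplicate name; the sketch makes that path explicit by re-raising):

# Reduced sketch of the duplicate-name handling; save() is a stand-in callable.
class IntegrityError(Exception):
    pass

def create_state(save):
    try:
        return {"status": 200, "data": save()}
    except IntegrityError as e:
        if "already exists" in str(e):
            return {"status": 400, "name": "The state name is already taken"}
        # Re-raise unrelated integrity errors instead of falling through.
        raise
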
@@ -21,221 +21,161 @@ class TimezoneEndpoint(APIView):

    throttle_classes = [AuthenticationThrottle]

    @method_decorator(cache_page(60 * 60 * 24))
    @method_decorator(cache_page(60 * 60 * 2))
    def get(self, request):
        timezone_mapping = {
            "-1100": [
                ("Midway Island", "Pacific/Midway"),
                ("American Samoa", "Pacific/Pago_Pago"),
            ],
            "-1000": [
                ("Hawaii", "Pacific/Honolulu"),
                ("Aleutian Islands", "America/Adak"),
            ],
            "-0930": [("Marquesas Islands", "Pacific/Marquesas")],
            "-0900": [
                ("Alaska", "America/Anchorage"),
                ("Gambier Islands", "Pacific/Gambier"),
            ],
            "-0800": [
                ("Pacific Time (US and Canada)", "America/Los_Angeles"),
                ("Baja California", "America/Tijuana"),
            ],
            "-0700": [
                ("Mountain Time (US and Canada)", "America/Denver"),
                ("Arizona", "America/Phoenix"),
                ("Chihuahua, Mazatlan", "America/Chihuahua"),
            ],
            "-0600": [
                ("Central Time (US and Canada)", "America/Chicago"),
                ("Saskatchewan", "America/Regina"),
                ("Guadalajara, Mexico City, Monterrey", "America/Mexico_City"),
                ("Tegucigalpa, Honduras", "America/Tegucigalpa"),
                ("Costa Rica", "America/Costa_Rica"),
            ],
            "-0500": [
                ("Eastern Time (US and Canada)", "America/New_York"),
                ("Lima", "America/Lima"),
                ("Bogota", "America/Bogota"),
                ("Quito", "America/Guayaquil"),
                ("Chetumal", "America/Cancun"),
            ],
            "-0430": [("Caracas (Old Venezuela Time)", "America/Caracas")],
            "-0400": [
                ("Atlantic Time (Canada)", "America/Halifax"),
                ("Caracas", "America/Caracas"),
                ("Santiago", "America/Santiago"),
                ("La Paz", "America/La_Paz"),
                ("Manaus", "America/Manaus"),
                ("Georgetown", "America/Guyana"),
                ("Bermuda", "Atlantic/Bermuda"),
            ],
            "-0330": [("Newfoundland Time (Canada)", "America/St_Johns")],
            "-0300": [
                ("Buenos Aires", "America/Argentina/Buenos_Aires"),
                ("Brasilia", "America/Sao_Paulo"),
                ("Greenland", "America/Godthab"),
                ("Montevideo", "America/Montevideo"),
                ("Falkland Islands", "Atlantic/Stanley"),
            ],
            "-0200": [
                (
                    "South Georgia and the South Sandwich Islands",
                    "Atlantic/South_Georgia",
                )
            ],
            "-0100": [
                ("Azores", "Atlantic/Azores"),
                ("Cape Verde Islands", "Atlantic/Cape_Verde"),
            ],
            "+0000": [
                ("Dublin", "Europe/Dublin"),
                ("Reykjavik", "Atlantic/Reykjavik"),
                ("Lisbon", "Europe/Lisbon"),
                ("Monrovia", "Africa/Monrovia"),
                ("Casablanca", "Africa/Casablanca"),
            ],
            "+0100": [
                ("Central European Time (Berlin, Rome, Paris)", "Europe/Paris"),
                ("West Central Africa", "Africa/Lagos"),
                ("Algiers", "Africa/Algiers"),
                ("Lagos", "Africa/Lagos"),
                ("Tunis", "Africa/Tunis"),
            ],
            "+0200": [
                ("Eastern European Time (Cairo, Helsinki, Kyiv)", "Europe/Kiev"),
                ("Athens", "Europe/Athens"),
                ("Jerusalem", "Asia/Jerusalem"),
                ("Johannesburg", "Africa/Johannesburg"),
                ("Harare, Pretoria", "Africa/Harare"),
            ],
            "+0300": [
                ("Moscow Time", "Europe/Moscow"),
                ("Baghdad", "Asia/Baghdad"),
                ("Nairobi", "Africa/Nairobi"),
                ("Kuwait, Riyadh", "Asia/Riyadh"),
            ],
            "+0330": [("Tehran", "Asia/Tehran")],
            "+0400": [
                ("Abu Dhabi", "Asia/Dubai"),
                ("Baku", "Asia/Baku"),
                ("Yerevan", "Asia/Yerevan"),
                ("Astrakhan", "Europe/Astrakhan"),
                ("Tbilisi", "Asia/Tbilisi"),
                ("Mauritius", "Indian/Mauritius"),
            ],
            "+0500": [
                ("Islamabad", "Asia/Karachi"),
                ("Karachi", "Asia/Karachi"),
                ("Tashkent", "Asia/Tashkent"),
                ("Yekaterinburg", "Asia/Yekaterinburg"),
                ("Maldives", "Indian/Maldives"),
                ("Chagos", "Indian/Chagos"),
            ],
            "+0530": [
                ("Chennai", "Asia/Kolkata"),
                ("Kolkata", "Asia/Kolkata"),
                ("Mumbai", "Asia/Kolkata"),
                ("New Delhi", "Asia/Kolkata"),
                ("Sri Jayawardenepura", "Asia/Colombo"),
            ],
            "+0545": [("Kathmandu", "Asia/Kathmandu")],
            "+0600": [
                ("Dhaka", "Asia/Dhaka"),
                ("Almaty", "Asia/Almaty"),
                ("Bishkek", "Asia/Bishkek"),
                ("Thimphu", "Asia/Thimphu"),
            ],
            "+0630": [
                ("Yangon (Rangoon)", "Asia/Yangon"),
                ("Cocos Islands", "Indian/Cocos"),
            ],
            "+0700": [
                ("Bangkok", "Asia/Bangkok"),
                ("Hanoi", "Asia/Ho_Chi_Minh"),
                ("Jakarta", "Asia/Jakarta"),
                ("Novosibirsk", "Asia/Novosibirsk"),
                ("Krasnoyarsk", "Asia/Krasnoyarsk"),
            ],
            "+0800": [
                ("Beijing", "Asia/Shanghai"),
                ("Singapore", "Asia/Singapore"),
                ("Perth", "Australia/Perth"),
                ("Hong Kong", "Asia/Hong_Kong"),
                ("Ulaanbaatar", "Asia/Ulaanbaatar"),
                ("Palau", "Pacific/Palau"),
            ],
            "+0845": [("Eucla", "Australia/Eucla")],
            "+0900": [
                ("Tokyo", "Asia/Tokyo"),
                ("Seoul", "Asia/Seoul"),
                ("Yakutsk", "Asia/Yakutsk"),
            ],
            "+0930": [
                ("Adelaide", "Australia/Adelaide"),
                ("Darwin", "Australia/Darwin"),
            ],
            "+1000": [
                ("Sydney", "Australia/Sydney"),
                ("Brisbane", "Australia/Brisbane"),
                ("Guam", "Pacific/Guam"),
                ("Vladivostok", "Asia/Vladivostok"),
                ("Tahiti", "Pacific/Tahiti"),
            ],
            "+1030": [("Lord Howe Island", "Australia/Lord_Howe")],
            "+1100": [
                ("Solomon Islands", "Pacific/Guadalcanal"),
                ("Magadan", "Asia/Magadan"),
                ("Norfolk Island", "Pacific/Norfolk"),
                ("Bougainville Island", "Pacific/Bougainville"),
                ("Chokurdakh", "Asia/Srednekolymsk"),
            ],
            "+1200": [
                ("Auckland", "Pacific/Auckland"),
                ("Wellington", "Pacific/Auckland"),
                ("Fiji Islands", "Pacific/Fiji"),
                ("Anadyr", "Asia/Anadyr"),
            ],
            "+1245": [("Chatham Islands", "Pacific/Chatham")],
            "+1300": [("Nuku'alofa", "Pacific/Tongatapu"), ("Samoa", "Pacific/Apia")],
            "+1400": [("Kiritimati Island", "Pacific/Kiritimati")],
        }
        timezone_locations = [
            ('Midway Island', 'Pacific/Midway'),  # UTC-11:00
            ('American Samoa', 'Pacific/Pago_Pago'),  # UTC-11:00
            ('Hawaii', 'Pacific/Honolulu'),  # UTC-10:00
            ('Aleutian Islands', 'America/Adak'),  # UTC-10:00 (DST: UTC-09:00)
            ('Marquesas Islands', 'Pacific/Marquesas'),  # UTC-09:30
            ('Alaska', 'America/Anchorage'),  # UTC-09:00 (DST: UTC-08:00)
            ('Gambier Islands', 'Pacific/Gambier'),  # UTC-09:00
            ('Pacific Time (US and Canada)', 'America/Los_Angeles'),  # UTC-08:00 (DST: UTC-07:00)
            ('Baja California', 'America/Tijuana'),  # UTC-08:00 (DST: UTC-07:00)
            ('Mountain Time (US and Canada)', 'America/Denver'),  # UTC-07:00 (DST: UTC-06:00)
            ('Arizona', 'America/Phoenix'),  # UTC-07:00
            ('Chihuahua, Mazatlan', 'America/Chihuahua'),  # UTC-07:00 (DST: UTC-06:00)
            ('Central Time (US and Canada)', 'America/Chicago'),  # UTC-06:00 (DST: UTC-05:00)
            ('Saskatchewan', 'America/Regina'),  # UTC-06:00
            ('Guadalajara, Mexico City, Monterrey', 'America/Mexico_City'),  # UTC-06:00 (DST: UTC-05:00)
            ('Tegucigalpa, Honduras', 'America/Tegucigalpa'),  # UTC-06:00
            ('Costa Rica', 'America/Costa_Rica'),  # UTC-06:00
            ('Eastern Time (US and Canada)', 'America/New_York'),  # UTC-05:00 (DST: UTC-04:00)
            ('Lima', 'America/Lima'),  # UTC-05:00
            ('Bogota', 'America/Bogota'),  # UTC-05:00
            ('Quito', 'America/Guayaquil'),  # UTC-05:00
            ('Chetumal', 'America/Cancun'),  # UTC-05:00 (DST: UTC-04:00)
            ('Caracas (Old Venezuela Time)', 'America/Caracas'),  # UTC-04:30
            ('Atlantic Time (Canada)', 'America/Halifax'),  # UTC-04:00 (DST: UTC-03:00)
            ('Caracas', 'America/Caracas'),  # UTC-04:00
            ('Santiago', 'America/Santiago'),  # UTC-04:00 (DST: UTC-03:00)
            ('La Paz', 'America/La_Paz'),  # UTC-04:00
            ('Manaus', 'America/Manaus'),  # UTC-04:00
            ('Georgetown', 'America/Guyana'),  # UTC-04:00
            ('Bermuda', 'Atlantic/Bermuda'),  # UTC-04:00 (DST: UTC-03:00)
            ('Newfoundland Time (Canada)', 'America/St_Johns'),  # UTC-03:30 (DST: UTC-02:30)
            ('Buenos Aires', 'America/Argentina/Buenos_Aires'),  # UTC-03:00
            ('Brasilia', 'America/Sao_Paulo'),  # UTC-03:00
            ('Greenland', 'America/Godthab'),  # UTC-03:00 (DST: UTC-02:00)
            ('Montevideo', 'America/Montevideo'),  # UTC-03:00
            ('Falkland Islands', 'Atlantic/Stanley'),  # UTC-03:00
            ('South Georgia and the South Sandwich Islands', 'Atlantic/South_Georgia'),  # UTC-02:00
            ('Azores', 'Atlantic/Azores'),  # UTC-01:00 (DST: UTC+00:00)
            ('Cape Verde Islands', 'Atlantic/Cape_Verde'),  # UTC-01:00
            ('Dublin', 'Europe/Dublin'),  # UTC+00:00 (DST: UTC+01:00)
            ('Reykjavik', 'Atlantic/Reykjavik'),  # UTC+00:00
            ('Lisbon', 'Europe/Lisbon'),  # UTC+00:00 (DST: UTC+01:00)
            ('Monrovia', 'Africa/Monrovia'),  # UTC+00:00
            ('Casablanca', 'Africa/Casablanca'),  # UTC+00:00 (DST: UTC+01:00)
            ('Central European Time (Berlin, Rome, Paris)', 'Europe/Paris'),  # UTC+01:00 (DST: UTC+02:00)
            ('West Central Africa', 'Africa/Lagos'),  # UTC+01:00
            ('Algiers', 'Africa/Algiers'),  # UTC+01:00
            ('Lagos', 'Africa/Lagos'),  # UTC+01:00
            ('Tunis', 'Africa/Tunis'),  # UTC+01:00
            ('Eastern European Time (Cairo, Helsinki, Kyiv)', 'Europe/Kiev'),  # UTC+02:00 (DST: UTC+03:00)
            ('Athens', 'Europe/Athens'),  # UTC+02:00 (DST: UTC+03:00)
            ('Jerusalem', 'Asia/Jerusalem'),  # UTC+02:00 (DST: UTC+03:00)
            ('Johannesburg', 'Africa/Johannesburg'),  # UTC+02:00
            ('Harare, Pretoria', 'Africa/Harare'),  # UTC+02:00
            ('Moscow Time', 'Europe/Moscow'),  # UTC+03:00
            ('Baghdad', 'Asia/Baghdad'),  # UTC+03:00
            ('Nairobi', 'Africa/Nairobi'),  # UTC+03:00
            ('Kuwait, Riyadh', 'Asia/Riyadh'),  # UTC+03:00
            ('Tehran', 'Asia/Tehran'),  # UTC+03:30 (DST: UTC+04:30)
            ('Abu Dhabi', 'Asia/Dubai'),  # UTC+04:00
            ('Baku', 'Asia/Baku'),  # UTC+04:00 (DST: UTC+05:00)
            ('Yerevan', 'Asia/Yerevan'),  # UTC+04:00 (DST: UTC+05:00)
            ('Astrakhan', 'Europe/Astrakhan'),  # UTC+04:00
            ('Tbilisi', 'Asia/Tbilisi'),  # UTC+04:00
            ('Mauritius', 'Indian/Mauritius'),  # UTC+04:00
            ('Islamabad', 'Asia/Karachi'),  # UTC+05:00
            ('Karachi', 'Asia/Karachi'),  # UTC+05:00
            ('Tashkent', 'Asia/Tashkent'),  # UTC+05:00
            ('Yekaterinburg', 'Asia/Yekaterinburg'),  # UTC+05:00
            ('Maldives', 'Indian/Maldives'),  # UTC+05:00
            ('Chagos', 'Indian/Chagos'),  # UTC+05:00
            ('Chennai', 'Asia/Kolkata'),  # UTC+05:30
            ('Kolkata', 'Asia/Kolkata'),  # UTC+05:30
            ('Mumbai', 'Asia/Kolkata'),  # UTC+05:30
            ('New Delhi', 'Asia/Kolkata'),  # UTC+05:30
            ('Sri Jayawardenepura', 'Asia/Colombo'),  # UTC+05:30
            ('Kathmandu', 'Asia/Kathmandu'),  # UTC+05:45
            ('Dhaka', 'Asia/Dhaka'),  # UTC+06:00
            ('Almaty', 'Asia/Almaty'),  # UTC+06:00
            ('Bishkek', 'Asia/Bishkek'),  # UTC+06:00
            ('Thimphu', 'Asia/Thimphu'),  # UTC+06:00
            ('Yangon (Rangoon)', 'Asia/Yangon'),  # UTC+06:30
            ('Cocos Islands', 'Indian/Cocos'),  # UTC+06:30
            ('Bangkok', 'Asia/Bangkok'),  # UTC+07:00
            ('Hanoi', 'Asia/Ho_Chi_Minh'),  # UTC+07:00
            ('Jakarta', 'Asia/Jakarta'),  # UTC+07:00
            ('Novosibirsk', 'Asia/Novosibirsk'),  # UTC+07:00
            ('Krasnoyarsk', 'Asia/Krasnoyarsk'),  # UTC+07:00
            ('Beijing', 'Asia/Shanghai'),  # UTC+08:00
            ('Singapore', 'Asia/Singapore'),  # UTC+08:00
            ('Perth', 'Australia/Perth'),  # UTC+08:00
            ('Hong Kong', 'Asia/Hong_Kong'),  # UTC+08:00
            ('Ulaanbaatar', 'Asia/Ulaanbaatar'),  # UTC+08:00
            ('Palau', 'Pacific/Palau'),  # UTC+08:00
            ('Eucla', 'Australia/Eucla'),  # UTC+08:45
            ('Tokyo', 'Asia/Tokyo'),  # UTC+09:00
            ('Seoul', 'Asia/Seoul'),  # UTC+09:00
            ('Yakutsk', 'Asia/Yakutsk'),  # UTC+09:00
            ('Adelaide', 'Australia/Adelaide'),  # UTC+09:30 (DST: UTC+10:30)
            ('Darwin', 'Australia/Darwin'),  # UTC+09:30
            ('Sydney', 'Australia/Sydney'),  # UTC+10:00 (DST: UTC+11:00)
            ('Brisbane', 'Australia/Brisbane'),  # UTC+10:00
            ('Guam', 'Pacific/Guam'),  # UTC+10:00
            ('Vladivostok', 'Asia/Vladivostok'),  # UTC+10:00
            ('Tahiti', 'Pacific/Tahiti'),  # UTC+10:00
            ('Lord Howe Island', 'Australia/Lord_Howe'),  # UTC+10:30 (DST: UTC+11:00)
            ('Solomon Islands', 'Pacific/Guadalcanal'),  # UTC+11:00
            ('Magadan', 'Asia/Magadan'),  # UTC+11:00
            ('Norfolk Island', 'Pacific/Norfolk'),  # UTC+11:00
            ('Bougainville Island', 'Pacific/Bougainville'),  # UTC+11:00
            ('Chokurdakh', 'Asia/Srednekolymsk'),  # UTC+11:00
            ('Auckland', 'Pacific/Auckland'),  # UTC+12:00 (DST: UTC+13:00)
            ('Wellington', 'Pacific/Auckland'),  # UTC+12:00 (DST: UTC+13:00)
            ('Fiji Islands', 'Pacific/Fiji'),  # UTC+12:00 (DST: UTC+13:00)
            ('Anadyr', 'Asia/Anadyr'),  # UTC+12:00
            ('Chatham Islands', 'Pacific/Chatham'),  # UTC+12:45 (DST: UTC+13:45)
            ("Nuku'alofa", 'Pacific/Tongatapu'),  # UTC+13:00
            ('Samoa', 'Pacific/Apia'),  # UTC+13:00 (DST: UTC+14:00)
            ('Kiritimati Island', 'Pacific/Kiritimati')  # UTC+14:00
        ]

        timezone_list = []
        now = datetime.now()

        # Process timezone mapping
        for offset, locations in timezone_mapping.items():
            sign = "-" if offset.startswith("-") else "+"
            hours = offset[1:3]
            minutes = offset[3:] if len(offset) > 3 else "00"
        for friendly_name, tz_identifier in timezone_locations:

            for friendly_name, tz_identifier in locations:
                try:
                    tz = pytz.timezone(tz_identifier)
                    current_offset = now.astimezone(tz).strftime("%z")
            try:
                tz = pytz.timezone(tz_identifier)
                current_offset = now.astimezone(tz).strftime("%z")

                    # converting and formatting UTC offset to GMT offset
                    current_utc_offset = now.astimezone(tz).utcoffset()
                    total_seconds = int(current_utc_offset.total_seconds())
                    hours_offset = total_seconds // 3600
                    minutes_offset = abs(total_seconds % 3600) // 60
                    gmt_offset = (
                        f"GMT{'+' if hours_offset >= 0 else '-'}"
                        f"{abs(hours_offset):02}:{minutes_offset:02}"
                    )
                # converting and formatting UTC offset to GMT offset
                current_utc_offset = now.astimezone(tz).utcoffset()
                total_seconds = int(current_utc_offset.total_seconds())
                hours_offset = total_seconds // 3600
                minutes_offset = abs(total_seconds % 3600) // 60
                offset = (
                    f"{'+' if hours_offset >= 0 else '-'}"
                    f"{abs(hours_offset):02}:{minutes_offset:02}"
                )

                    timezone_value = {
                        "offset": int(current_offset),
                        "utc_offset": f"UTC{sign}{hours}:{minutes}",
                        "gmt_offset": gmt_offset,
                        "value": tz_identifier,
                        "label": f"{friendly_name}",
                    }
                timezone_value = {
                    "offset": int(current_offset),
                    "utc_offset": f"UTC{offset}",
                    "gmt_offset": f"GMT{offset}",
                    "value": tz_identifier,
                    "label": f"{friendly_name}",
                }

                    timezone_list.append(timezone_value)
                except pytz.exceptions.UnknownTimeZoneError:
                    continue
                timezone_list.append(timezone_value)
            except pytz.exceptions.UnknownTimeZoneError:
                continue

        # Sort by offset and then by label
        timezone_list.sort(key=lambda x: (x["offset"], x["label"]))

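The rewritten loop above derives both the UTC and GMT labels from one computed offset instead of trusting the hard-coded bucket keys of the old dict. The arithmetic, worked for Asia/Kolkata (UTC+05:30, i.e. 19800 seconds):

# Worked example of the offset arithmetic used in the hunk above.
total_seconds = 19800  # utcoffset().total_seconds() for UTC+05:30
hours_offset = total_seconds // 3600  # 5
minutes_offset = abs(total_seconds % 3600) // 60  # 30
offset = f"{'+' if hours_offset >= 0 else '-'}{abs(hours_offset):02}:{minutes_offset:02}"
assert offset == "+05:30"

One subtlety: for negative half-hour offsets, Python's floor division rounds away from zero (-12600 // 3600 == -4), so a UTC-03:30 zone would format as "-04:30" under this exact arithmetic.
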
@@ -24,6 +24,7 @@ from plane.db.models import (
    ProjectMember,
    Project,
    CycleIssue,
    UserRecentVisit,
)
from plane.utils.grouper import (
    issue_group_values,
@@ -116,7 +117,7 @@ class WorkspaceViewViewSet(BaseViewSet):
        return Response(serializer.data, status=status.HTTP_200_OK)

    @allow_permission(
        allowed_roles=[], level="WORKSPACE", creator=True, model=IssueView
        allowed_roles=[ROLE.ADMIN], level="WORKSPACE", creator=True, model=IssueView
    )
    def destroy(self, request, slug, pk):
        workspace_view = IssueView.objects.get(pk=pk, workspace__slug=slug)
@@ -495,6 +496,13 @@ class IssueViewViewSet(BaseViewSet):
                entity_identifier=pk,
                entity_type="view",
            ).delete()
            # Delete the page from recent visit
            UserRecentVisit.objects.filter(
                project_id=project_id,
                workspace__slug=slug,
                entity_identifier=pk,
                entity_name="view",
            ).delete(soft=False)
        else:
            return Response(
                {"error": "Only admin or owner can delete the view"},

@@ -120,7 +120,7 @@ class WebhookLogsEndpoint(BaseAPIView):
    @allow_permission(allowed_roles=[ROLE.ADMIN], level="WORKSPACE")
    def get(self, request, slug, webhook_id):
        webhook_logs = WebhookLog.objects.filter(
            workspace__slug=slug, webhook_id=webhook_id
            workspace__slug=slug, webhook=webhook_id
        )
        serializer = WebhookLogSerializer(webhook_logs, many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)

@@ -7,9 +7,11 @@ from datetime import date
from dateutil.relativedelta import relativedelta
from django.db import IntegrityError
from django.db.models import Count, F, Func, OuterRef, Prefetch, Q

from django.db.models.fields import DateField
from django.db.models.functions import Cast, ExtractDay, ExtractWeek


# Django imports
from django.http import HttpResponse
from django.utils import timezone
@@ -62,12 +64,6 @@ class WorkSpaceViewSet(BaseViewSet):
            .values("count")
        )

        issue_count = (
            Issue.issue_objects.filter(workspace=OuterRef("id"))
            .order_by()
            .annotate(count=Func(F("id"), function="Count"))
            .values("count")
        )
        return (
            self.filter_queryset(super().get_queryset().select_related("owner"))
            .order_by("name")
@@ -76,8 +72,6 @@ class WorkSpaceViewSet(BaseViewSet):
                workspace_member__is_active=True,
            )
            .annotate(total_members=member_count)
            .annotate(total_issues=issue_count)
            .select_related("owner")
        )

    def create(self, request):
@@ -123,7 +117,14 @@ class WorkSpaceViewSet(BaseViewSet):
                role=20,
                company_role=request.data.get("company_role", ""),
            )
            return Response(serializer.data, status=status.HTTP_201_CREATED)

            # Get total members and role
            total_members=WorkspaceMember.objects.filter(workspace_id=serializer.data["id"]).count()
            data = serializer.data
            data["total_members"] = total_members
            data["role"] = 20

            return Response(data, status=status.HTTP_201_CREATED)
        return Response(
            [serializer.errors[error][0] for error in serializer.errors],
            status=status.HTTP_400_BAD_REQUEST,
@@ -166,11 +167,9 @@ class UserWorkSpacesEndpoint(BaseAPIView):
            .values("count")
        )

        issue_count = (
            Issue.issue_objects.filter(workspace=OuterRef("id"))
            .order_by()
            .annotate(count=Func(F("id"), function="Count"))
            .values("count")
        role = (
            WorkspaceMember.objects.filter(workspace=OuterRef("id"), member=request.user, is_active=True)
            .values("role")
        )

        workspace = (
@@ -182,19 +181,19 @@ class UserWorkSpacesEndpoint(BaseAPIView):
                ),
            )
            )
            .select_related("owner")
            .annotate(total_members=member_count)
            .annotate(total_issues=issue_count)
            .annotate(role=role, total_members=member_count)
            .filter(
                workspace_member__member=request.user, workspace_member__is_active=True
            )
            .distinct()
        )

        workspaces = WorkSpaceSerializer(
            self.filter_queryset(workspace),
            fields=fields if fields else None,
            many=True,
        ).data

        return Response(workspaces, status=status.HTTP_200_OK)


@@ -36,7 +36,7 @@ from plane.db.models import (
from .. import BaseViewSet
from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.issue_filters import issue_filters

from plane.utils.host import base_host

class WorkspaceDraftIssueViewSet(BaseViewSet):
    model = DraftIssue
@@ -241,7 +241,7 @@ class WorkspaceDraftIssueViewSet(BaseViewSet):
                current_instance=None,
                epoch=int(timezone.now().timestamp()),
                notification=True,
                origin=request.META.get("HTTP_ORIGIN"),
                origin=base_host(request=request, is_app=True),
            )

            if request.data.get("cycle_id", None):
@@ -270,7 +270,7 @@ class WorkspaceDraftIssueViewSet(BaseViewSet):
                    ),
                    epoch=int(timezone.now().timestamp()),
                    notification=True,
                    origin=request.META.get("HTTP_ORIGIN"),
                    origin=base_host(request=request, is_app=True),
                )

            if request.data.get("module_ids", []):
@@ -300,7 +300,7 @@ class WorkspaceDraftIssueViewSet(BaseViewSet):
                        current_instance=None,
                        epoch=int(timezone.now().timestamp()),
                        notification=True,
                        origin=request.META.get("HTTP_ORIGIN"),
                        origin=base_host(request=request, is_app=True),
                    )
                    for module in request.data.get("module_ids", [])
                ]

@@ -4,6 +4,7 @@ from rest_framework.response import Response

# Django modules
from django.db.models import Q
from django.db import IntegrityError

# Module imports
from plane.app.views.base import BaseAPIView
@@ -31,16 +32,37 @@ class WorkspaceFavoriteEndpoint(BaseAPIView):

    @allow_permission(allowed_roles=[ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE")
    def post(self, request, slug):
        workspace = Workspace.objects.get(slug=slug)
        serializer = UserFavoriteSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save(
                user_id=request.user.id,
                workspace=workspace,
                project_id=request.data.get("project_id", None),
        try:
            workspace = Workspace.objects.get(slug=slug)

            # If the favorite exists return
            if request.data.get("entity_identifier"):
                user_favorites = UserFavorite.objects.filter(
                    workspace=workspace,
                    user_id=request.user.id,
                    entity_type=request.data.get("entity_type"),
                    entity_identifier=request.data.get("entity_identifier"),
                ).first()

                # If the favorite exists return
                if user_favorites:
                    serializer = UserFavoriteSerializer(user_favorites)
                    return Response(serializer.data, status=status.HTTP_200_OK)

            # else create a new favorite
            serializer = UserFavoriteSerializer(data=request.data)
            if serializer.is_valid():
                serializer.save(
                    user_id=request.user.id,
                    workspace=workspace,
                    project_id=request.data.get("project_id", None),
                )
                return Response(serializer.data, status=status.HTTP_200_OK)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        except IntegrityError:
            return Response(
                {"error": "Favorite already exists"}, status=status.HTTP_400_BAD_REQUEST
            )
        return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    @allow_permission(allowed_roles=[ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE")
    def patch(self, request, slug, favorite_id):

85 apiserver/plane/app/views/workspace/home.py Normal file
@@ -0,0 +1,85 @@
# Module imports
from ..base import BaseAPIView
from plane.db.models.workspace import WorkspaceHomePreference
from plane.app.permissions import allow_permission, ROLE
from plane.db.models import Workspace
from plane.app.serializers.workspace import WorkspaceHomePreferenceSerializer

# Third party imports
from rest_framework.response import Response
from rest_framework import status


class WorkspaceHomePreferenceViewSet(BaseAPIView):
    model = WorkspaceHomePreference

    def get_serializer_class(self):
        return WorkspaceHomePreferenceSerializer

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST], level="WORKSPACE")
    def get(self, request, slug):
        workspace = Workspace.objects.get(slug=slug)

        get_preference = WorkspaceHomePreference.objects.filter(
            user=request.user, workspace_id=workspace.id
        )

        create_preference_keys = []

        keys = [
            key
            for key, _ in WorkspaceHomePreference.HomeWidgetKeys.choices
            if key not in ["quick_tutorial", "new_at_plane"]
        ]

        sort_order_counter = 1

        for preference in keys:
            if preference not in get_preference.values_list("key", flat=True):
                create_preference_keys.append(preference)

        sort_order = 1000 - sort_order_counter

        preference = WorkspaceHomePreference.objects.bulk_create(
            [
                WorkspaceHomePreference(
                    key=key,
                    user=request.user,
                    workspace=workspace,
                    sort_order=sort_order,
                )
                for key in create_preference_keys
            ],
            batch_size=10,
            ignore_conflicts=True,
        )
        sort_order_counter += 1

        preference = WorkspaceHomePreference.objects.filter(
            user=request.user, workspace_id=workspace.id
        )

        return Response(
            preference.values("key", "is_enabled", "config", "sort_order"),
            status=status.HTTP_200_OK,
        )

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST], level="WORKSPACE")
    def patch(self, request, slug, key):
        preference = WorkspaceHomePreference.objects.filter(
            key=key, workspace__slug=slug, user=request.user
        ).first()

        if preference:
            serializer = WorkspaceHomePreferenceSerializer(
                preference, data=request.data, partial=True
            )

            if serializer.is_valid():
                serializer.save()
                return Response(serializer.data, status=status.HTTP_200_OK)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

        return Response(
            {"detail": "Preference not found"}, status=status.HTTP_400_BAD_REQUEST
        )

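The new home.py above backfills any missing widget-preference rows and relies on bulk_create(..., ignore_conflicts=True) so concurrent first requests cannot crash on duplicates; the database quietly drops the losing inserts. The missing-key computation in isolation:

# Stand-in sketch of the backfill selection; the key names are hypothetical.
def missing_keys(all_keys, existing_keys):
    existing = set(existing_keys)
    return [key for key in all_keys if key not in existing]

assert missing_keys(["links", "stickies"], ["links"]) == ["stickies"]
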
Some files were not shown because too many files have changed in this diff.