forked from github/plane

Compare commits: dev-env-fi...chore/modu

716 commits in this range (b76619639f … 3d6f2dd3dc; per-commit author, date, and message details not captured).
.env.example (new file, 87 lines)
@@ -0,0 +1,87 @@
+# Frontend
+# Extra image domains that need to be added for Next Image
+NEXT_PUBLIC_EXTRA_IMAGE_DOMAINS=
+# Google Client ID for Google OAuth
+NEXT_PUBLIC_GOOGLE_CLIENTID=""
+# Github ID for Github OAuth
+NEXT_PUBLIC_GITHUB_ID=""
+# Github App Name for GitHub Integration
+NEXT_PUBLIC_GITHUB_APP_NAME=""
+# Sentry DSN for error monitoring
+NEXT_PUBLIC_SENTRY_DSN=""
+# Enable/Disable OAUTH - default 0 for selfhosted instance
+NEXT_PUBLIC_ENABLE_OAUTH=0
+# Enable/Disable sentry
+NEXT_PUBLIC_ENABLE_SENTRY=0
+# Enable/Disable session recording
+NEXT_PUBLIC_ENABLE_SESSION_RECORDER=0
+# Enable/Disable event tracking
+NEXT_PUBLIC_TRACK_EVENTS=0
+# Slack for Slack Integration
+NEXT_PUBLIC_SLACK_CLIENT_ID=""
+# For Telemetry, set it to "app.plane.so"
+NEXT_PUBLIC_PLAUSIBLE_DOMAIN=""
+# public boards deploy url
+NEXT_PUBLIC_DEPLOY_URL=""
+
+# Backend
+# Debug value for api server use it as 0 for production use
+DEBUG=0
+
+# Error logs
+SENTRY_DSN=""
+
+# Database Settings
+PGUSER="plane"
+PGPASSWORD="plane"
+PGHOST="plane-db"
+PGDATABASE="plane"
+DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
+
+# Redis Settings
+REDIS_HOST="plane-redis"
+REDIS_PORT="6379"
+REDIS_URL="redis://${REDIS_HOST}:6379/"
+
+# Email Settings
+EMAIL_HOST=""
+EMAIL_HOST_USER=""
+EMAIL_HOST_PASSWORD=""
+EMAIL_PORT=587
+EMAIL_FROM="Team Plane <team@mailer.plane.so>"
+EMAIL_USE_TLS="1"
+EMAIL_USE_SSL="0"
+
+# AWS Settings
+AWS_REGION=""
+AWS_ACCESS_KEY_ID="access-key"
+AWS_SECRET_ACCESS_KEY="secret-key"
+AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
+# Changing this requires change in the nginx.conf for uploads if using minio setup
+AWS_S3_BUCKET_NAME="uploads"
+# Maximum file upload limit
+FILE_SIZE_LIMIT=5242880
+
+# GPT settings
+OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
+OPENAI_API_KEY="sk-" # add your openai key here
+GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
+
+# Github
+GITHUB_CLIENT_SECRET="" # For fetching release notes
+
+# Settings related to Docker
+DOCKERIZED=1
+# set to 1 If using the pre-configured minio setup
+USE_MINIO=1
+
+# Nginx Configuration
+NGINX_PORT=80
+
+# Default Creds
+DEFAULT_EMAIL="captain@plane.so"
+DEFAULT_PASSWORD="password123"
+
+# SignUps
+ENABLE_SIGNUP="1"
+# Auto generated and Required that will be generated from setup.sh
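The `${PGUSER}`-style references in `DATABASE_URL` and `REDIS_URL` are not expanded by the file itself; they rely on whatever consumes the file performing substitution. Docker Compose interpolates them when the `.env` sits next to the compose file, while many plain dotenv loaders pass them through verbatim. A minimal sketch under that assumption: the service names match the `PGHOST`/`REDIS_HOST` defaults above, but the image names and Postgres/Redis versions are illustrative guesses, not taken from this diff.

```yaml
# Hypothetical docker-compose.yml fragment. Compose interpolates ${PGUSER},
# ${PGHOST}, etc. under `environment:` from a .env file in the same directory;
# values passed via `env_file:` are handed to the container verbatim.
services:
  plane-db:
    image: postgres:14-alpine        # assumed version
    environment:
      POSTGRES_USER: ${PGUSER}
      POSTGRES_PASSWORD: ${PGPASSWORD}
      POSTGRES_DB: ${PGDATABASE}
  plane-redis:
    image: redis:6.2-alpine          # assumed version
  plane-api:
    image: makeplane/plane-backend   # image name taken from the workflow below
    env_file: .env
    environment:
      # Re-declared here so compose performs the ${...} substitution itself.
      DATABASE_URL: postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
    depends_on: [plane-db, plane-redis]
```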
.github/workflows/push-image-backend.yml (vendored, 45 changed lines)
@@ -1,4 +1,4 @@
-name: Build Api Server Docker Image
+name: Build and Push Backend Docker Image

 on:
   push:
@@ -10,11 +10,8 @@ on:

 jobs:
   build_push_backend:
-    name: Build Api Server Docker Image
+    name: Build and Push Api Server Docker Image
     runs-on: ubuntu-20.04
-    permissions:
-      contents: read
-      packages: write

     steps:
       - name: Check out the repo
@@ -28,20 +25,33 @@ jobs:
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v2.5.0

-      - name: Login to Github Container Registry
+      - name: Login to GitHub Container Registry
        uses: docker/login-action@v2.1.0
        with:
          registry: "ghcr.io"
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

-      - name: Extract metadata (tags, labels) for Docker
-        id: meta
+      - name: Login to Docker Hub
+        uses: docker/login-action@v2.1.0
+        with:
+          registry: "registry.hub.docker.com"
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_PASSWORD }}
+
+      - name: Extract metadata (tags, labels) for Docker (Docker Hub)
+        id: ghmeta
        uses: docker/metadata-action@v4.3.0
        with:
+          images: makeplane/plane-backend
+
+      - name: Extract metadata (tags, labels) for Docker (Github)
+        id: dkrmeta
+        uses: docker/metadata-action@v4.3.0
+        with:
          images: ghcr.io/${{ github.repository }}-backend

-      - name: Build Api Server
+      - name: Build and Push to GitHub Container Registry
        uses: docker/build-push-action@v4.0.0
        with:
          context: ./apiserver
@@ -50,5 +60,18 @@ jobs:
          push: true
          cache-from: type=gha
          cache-to: type=gha
-          tags: ${{ steps.meta.outputs.tags }}
-          labels: ${{ steps.meta.outputs.labels }}
+          tags: ${{ steps.ghmeta.outputs.tags }}
+          labels: ${{ steps.ghmeta.outputs.labels }}
+
+      - name: Build and Push to Docker Hub
+        uses: docker/build-push-action@v4.0.0
+        with:
+          context: ./apiserver
+          file: ./apiserver/Dockerfile.api
+          platforms: linux/arm64,linux/amd64
+          push: true
+          cache-from: type=gha
+          cache-to: type=gha
+          tags: ${{ steps.dkrmeta.outputs.tags }}
+          labels: ${{ steps.dkrmeta.outputs.labels }}
+
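The two push steps run an identical build: same context, Dockerfile, platforms, and GHA cache, differing only in which metadata step supplies tags and labels. Note the naming crossover as captured: the step labelled "(Docker Hub)" declares `id: ghmeta` and the one labelled "(Github)" declares `id: dkrmeta`, and since the registry lives in the tag itself, the step named for GHCR would actually push the Docker Hub tags and vice versa, if the capture is faithful. Because `docker/build-push-action` accepts a newline-separated tag list, the pair could in principle collapse into one step. A hypothetical consolidation, not what this diff does:

```yaml
# Hypothetical single publish step: each tag names its own registry, so one
# multi-arch build can be pushed to both targets after both logins succeed.
- name: Build and Push to GHCR and Docker Hub
  uses: docker/build-push-action@v4.0.0
  with:
    context: ./apiserver
    file: ./apiserver/Dockerfile.api
    platforms: linux/arm64,linux/amd64
    push: true
    cache-from: type=gha
    cache-to: type=gha
    tags: |
      ${{ steps.ghmeta.outputs.tags }}
      ${{ steps.dkrmeta.outputs.tags }}
```

Merged or split, the `docker login` for every registry named in the tag list must have succeeded before `push: true` runs.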
.github/workflows/push-image-frontend.yml (vendored, 39 changed lines)
@@ -1,4 +1,4 @@
-name: Build Frontend Docker Image
+name: Build and Push Frontend Docker Image

 on:
   push:
@@ -12,9 +12,6 @@ jobs:
   build_push_frontend:
     name: Build Frontend Docker Image
     runs-on: ubuntu-20.04
-    permissions:
-      contents: read
-      packages: write

     steps:
       - name: Check out the repo
@@ -35,13 +32,26 @@
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

-      - name: Extract metadata (tags, labels) for Docker
+      - name: Login to Docker Hub
+        uses: docker/login-action@v2.1.0
+        with:
+          registry: "registry.hub.docker.com"
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_PASSWORD }}
+
+      - name: Extract metadata (tags, labels) for Docker (Docker Hub)
+        id: ghmeta
+        uses: docker/metadata-action@v4.3.0
+        with:
+          images: makeplane/plane-frontend
+
+      - name: Extract metadata (tags, labels) for Docker (Github)
        id: meta
        uses: docker/metadata-action@v4.3.0
        with:
          images: ghcr.io/${{ github.repository }}-frontend

-      - name: Build Frontend Server
+      - name: Build and Push to GitHub Container Registry
        uses: docker/build-push-action@v4.0.0
        with:
          context: .
@@ -50,5 +60,18 @@
          push: true
          cache-from: type=gha
          cache-to: type=gha
-          tags: ${{ steps.meta.outputs.tags }}
-          labels: ${{ steps.meta.outputs.labels }}
+          tags: ${{ steps.ghmeta.outputs.tags }}
+          labels: ${{ steps.ghmeta.outputs.labels }}
+
+      - name: Build and Push to Docker Container Registry
+        uses: docker/build-push-action@v4.0.0
+        with:
+          context: .
+          file: ./apps/app/Dockerfile.web
+          platforms: linux/arm64,linux/amd64
+          push: true
+          cache-from: type=gha
+          cache-to: type=gha
+          tags: ${{ steps.dkrmeta.outputs.tags }}
+          labels: ${{ steps.dkrmeta.outputs.labels }}
+
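The frontend workflow mirrors the backend one, with the same label/id crossover, plus one extra wrinkle as captured: the Docker Hub push consumes `steps.dkrmeta`, but no step in this file declares `id: dkrmeta` (the metadata steps here are `ghmeta` and `meta`), so its tags and labels would expand to empty strings. A minimal, purely illustrative fix is to declare the id that the push step actually references:

```yaml
# Illustrative correction only: rename the GitHub-side metadata step's id
# (declared as "id: meta" in the captured diff) so steps.dkrmeta resolves.
- name: Extract metadata (tags, labels) for Docker (Github)
  id: dkrmeta
  uses: docker/metadata-action@v4.3.0
  with:
    images: ghcr.io/${{ github.repository }}-frontend
```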
.gitignore (vendored, 5 changed lines)
@@ -43,6 +43,7 @@ yarn-error.log*

 ## Django ##
 venv
+.venv
 *.pyc
 staticfiles
 mediafiles
@@ -69,4 +70,6 @@ package-lock.json
 # lock files
 package-lock.json
 pnpm-lock.yaml
-pnpm-workspace.yaml
+pnpm-workspace.yaml
+
+.npmrc
(file name not captured for the following hunk)
@@ -23,7 +23,6 @@ You can open a new issue with this [issue form](https://github.com/makeplane/pla
 - Python version 3.8+
 - Postgres version v14
 - Redis version v6.2.7
 - pnpm version 7.22.0
-

 ### Setup the project
Dockerfile (43 changed lines)
@@ -1,8 +1,8 @@
 FROM node:18-alpine AS builder
 RUN apk add --no-cache libc6-compat
 RUN apk update
 # Set working directory
 WORKDIR /app
-
+ENV NEXT_PUBLIC_API_BASE_URL=http://NEXT_PUBLIC_API_BASE_URL_PLACEHOLDER
 RUN yarn global add turbo
 COPY . .
@@ -12,11 +12,9 @@ RUN turbo prune --scope=app --docker
 # Add lockfile and package.json's of isolated subworkspace
 FROM node:18-alpine AS installer

 RUN apk add --no-cache libc6-compat
 RUN apk update
 WORKDIR /app
-
-ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000
 # First install the dependencies (as they change less often)
 COPY .gitignore .gitignore
 COPY --from=builder /app/out/json/ .
@@ -26,9 +24,16 @@ RUN yarn install
 # Build the project
 COPY --from=builder /app/out/full/ .
 COPY turbo.json turbo.json
+COPY replace-env-vars.sh /usr/local/bin/
+USER root
+RUN chmod +x /usr/local/bin/replace-env-vars.sh

 RUN yarn turbo run build --filter=app

+ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
+    BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
+
+RUN /usr/local/bin/replace-env-vars.sh http://NEXT_PUBLIC_WEBAPP_URL_PLACEHOLDER ${NEXT_PUBLIC_API_BASE_URL}

 FROM python:3.11.1-alpine3.17 AS backend

@@ -36,10 +41,12 @@ FROM python:3.11.1-alpine3.17 AS backend
 ENV PYTHONDONTWRITEBYTECODE 1
 ENV PYTHONUNBUFFERED 1
+ENV PIP_DISABLE_PIP_VERSION_CHECK=1
 ENV DJANGO_SETTINGS_MODULE plane.settings.production
+ENV DOCKERIZED 1

 WORKDIR /code

-RUN apk --update --no-cache add \
+RUN apk --no-cache add \
     "libpq~=15" \
     "libxslt~=1.1" \
     "nodejs-current~=19" \
@@ -51,8 +58,8 @@ RUN apk --update --no-cache add \

 COPY apiserver/requirements.txt ./
 COPY apiserver/requirements ./requirements
-RUN apk add libffi-dev
-RUN apk --update --no-cache --virtual .build-deps add \
+RUN apk add --no-cache libffi-dev
+RUN apk add --no-cache --virtual .build-deps \
     "bash~=5.2" \
     "g++~=12.2" \
     "gcc~=12.2" \
@@ -73,18 +80,13 @@ COPY apiserver/plane plane/
 COPY apiserver/templates templates/

 COPY apiserver/gunicorn.config.py ./
-RUN apk --update --no-cache add "bash~=5.2"
+RUN apk --no-cache add "bash~=5.2"
 COPY apiserver/bin ./bin/

 RUN chmod +x ./bin/takeoff ./bin/worker
 RUN chmod -R 777 /code

 # Expose container port and run entry point script
 EXPOSE 8000
-EXPOSE 3000
-EXPOSE 80
-
-
-
 WORKDIR /app

@@ -108,9 +110,16 @@ COPY nginx/nginx-single-docker-image.conf /etc/nginx/http.d/default.conf

 COPY nginx/supervisor.conf /code/supervisor.conf

+ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000
+ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
+    BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
+
+USER root
+COPY replace-env-vars.sh /usr/local/bin/
 COPY start.sh /usr/local/bin/
+RUN chmod +x /usr/local/bin/replace-env-vars.sh
 RUN chmod +x /usr/local/bin/start.sh

 EXPOSE 80

 CMD ["supervisord","-c","/code/supervisor.conf"]
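The notable pattern in this Dockerfile is placeholder substitution: `NEXT_PUBLIC_*` values are frozen into the Next.js client bundle at build time, so the builder bakes in the literal `http://NEXT_PUBLIC_API_BASE_URL_PLACEHOLDER` and the image ships `replace-env-vars.sh` and `start.sh` to rewrite it when the container starts. That lets one prebuilt image point at any API endpoint without rebuilding. A hypothetical run configuration under that assumption (the diff does not name an image tag, so `plane-single` is illustrative):

```yaml
# Hypothetical run configuration for the single image this Dockerfile builds.
# start.sh is expected to rewrite the baked-in placeholder URL with the
# NEXT_PUBLIC_API_BASE_URL supplied here before supervisord takes over.
services:
  plane:
    image: plane-single              # assumed local tag
    ports:
      - "80:80"                      # the final stage exposes only port 80
    environment:
      NEXT_PUBLIC_API_BASE_URL: https://plane.example.com   # example value
```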
LICENSE.txt (798 changed lines)
@@ -1,201 +1,661 @@
[License swap: the full text of the Apache License, Version 2.0 (January 2004, http://www.apache.org/licenses/), including the appendix notice "Copyright 2022 Plane Software Labs Private Limited", is removed, and the full text of the GNU Affero General Public License, Version 3 (19 November 2007, Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>) is added in its place. The captured diff interleaves the two license texts line by line and breaks off partway through AGPL section 8, "Termination".]
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Remote Network Interaction; Use with the GNU General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, if you modify the
|
||||
Program, your modified version must prominently offer all users
|
||||
interacting with it remotely through a computer network (if your version
|
||||
supports such interaction) an opportunity to receive the Corresponding
|
||||
Source of your version by providing access to the Corresponding Source
|
||||
from a network server at no charge, through some standard or customary
|
||||
means of facilitating copying of software. This Corresponding Source
|
||||
shall include the Corresponding Source for any work covered by version 3
|
||||
of the GNU General Public License that is incorporated pursuant to the
|
||||
following paragraph.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the work with which it is combined will remain governed by version
|
||||
3 of the GNU General Public License.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU Affero General Public License from time to time. Such new versions
|
||||
will be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU Affero General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU Affero General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU Affero General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.

Also add information on how to contact you by electronic and paper mail.

If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.
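For a Django-based service such as this one, one minimal way to satisfy that suggestion is a route that points users at a public archive of the code. A sketch, assuming an illustrative URL, view name, and route that are not part of this repository:

```python
# A minimal sketch of one way to expose a "Source" link from a Django
# backend. The repository URL and route name are illustrative assumptions.
from django.http import HttpResponseRedirect
from django.urls import path


def source_code(request):
    # Redirect users to a public archive of the Corresponding Source.
    return HttpResponseRedirect("https://github.com/makeplane/plane")


urlpatterns = [
    path("source/", source_code, name="source-code"),
]
```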
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
<https://www.gnu.org/licenses/>.
99
README.md
@@ -2,7 +2,7 @@

<p align="center">
<a href="https://plane.so">
<img src="https://res.cloudinary.com/toolspacedev/image/upload/v1680596414/Plane/Plane_Icon_Blue_on_White_150x150_muysa3.jpg" alt="Plane Logo" width="70">
<img src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_logo_.webp" alt="Plane Logo" width="70">
</a>
</p>

@@ -11,22 +11,29 @@

<p align="center">
<a href="https://discord.com/invite/A92xrEGCge">
<img alt="Discord" src="https://img.shields.io/discord/1031547764020084846?color=5865F2&label=Discord&style=for-the-badge" />
<img alt="Discord online members" src="https://img.shields.io/discord/1031547764020084846?color=5865F2&label=Discord&style=for-the-badge" />
</a>
<img alt="Discord" src="https://img.shields.io/github/commit-activity/m/makeplane/plane?style=for-the-badge" />
<img alt="Commit activity per month" src="https://img.shields.io/github/commit-activity/m/makeplane/plane?style=for-the-badge" />
</p>
<br />

<p>
<a href="https://app.plane.so/" target="_blank">
<a href="https://app.plane.so/#gh-light-mode-only" target="_blank">
<img
src="https://res.cloudinary.com/toolspacedev/image/upload/v1680599798/Plane/plane_1_1_tnb32j.png"
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_screen.webp"
alt="Plane Screens"
width="100%"
/>
</a>
<a href="https://app.plane.so/#gh-dark-mode-only" target="_blank">
<img
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_screens_dark_mode.webp"
alt="Plane Screens"
width="100%"
/>
</a>
</p>

Meet Plane. An open-source software development tool to manage issues, sprints, and product roadmaps with peace of mind 🧘‍♀️.
Meet [Plane](https://plane.so). An open-source software development tool to manage issues, sprints, and product roadmaps with peace of mind 🧘‍♀️.


> Plane is still in its early days, not everything will be perfect yet, and hiccups may happen. Please let us know of any suggestions, ideas, or bugs that you encounter on our [Discord](https://discord.com/invite/A92xrEGCge) or GitHub issues, and we will use your feedback to improve on our upcoming releases.

@@ -38,31 +45,36 @@ The easiest way to get started with Plane is by creating a [Plane Cloud](https:/

### Docker Compose Setup

- Clone the Repository
- Clone the repository

```bash
git clone https://github.com/makeplane/plane
```

- Change Directory

```bash
cd plane
chmod +x setup.sh
```

- Run setup.sh

```bash
./setup.sh localhost
./setup.sh http://localhost
```

> If running in a cloud env replace localhost with public facing IP address of the VM

- Setup Tiptap Pro

Visit [Tiptap Pro](https://collab.tiptap.dev/pro-extensions) and signup (it is free).

Create a **`.npmrc`** file, copy the following and replace your registry token generated from Tiptap Pro.

```
@tiptap-pro:registry=https://registry.tiptap.dev/
//registry.tiptap.dev/:_authToken=YOUR_REGISTRY_TOKEN
```
- Run Docker compose up

```bash
docker-compose up
docker compose up -d
```

<strong>You can use the default email and password for your first login `captain@plane.so` and `password123`.</strong>

@@ -82,41 +94,62 @@ docker-compose up

## 📸 Screenshots

<p>
<a href="https://app.plane.so/" target="_blank">
<a href="https://plane.so" target="_blank">
<img
src="https://res.cloudinary.com/toolspacedev/image/upload/v1680601719/Plane/plane_2_iqao52.png"
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_views_dark_mode.webp"
alt="Plane Views"
width="100%"
/>
</a>
</p>
<p>
<a href="https://plane.so" target="_blank">
<img
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_issue_detail_dark_mode.webp"
alt="Plane Issue Details"
width="100%"
/>
</a>
</p>
<p>
<a href="https://app.plane.so/" target="_blank">
</p>
<p>
<a href="https://plane.so" target="_blank">
<img
src="https://res.cloudinary.com/toolspacedev/image/upload/v1680604273/Plane/plane_5_1_nwsl3a.png"
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_cycles_modules_dark_mode.webp"
alt="Plane Cycles and Modules"
width="100%"
/>
</a>
</p>
<p>
<a href="https://app.plane.so/" target="_blank">
</p>
<p>
<a href="https://plane.so" target="_blank">
<img
src="https://res.cloudinary.com/toolspacedev/image/upload/v1680601713/Plane/plane_4_cqm0g8.png"
alt="Plane Quick Lists"
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_analytics_dark_mode.webp"
alt="Plane Analytics"
width="100%"
/>
</a>
</p>
<p>
<a href="https://app.plane.so/" target="_blank">
</p>
<p>
<a href="https://plane.so" target="_blank">
<img
src="https://res.cloudinary.com/toolspacedev/image/upload/v1680601712/Plane/plane_3_1_cu4fsc.png"
alt="Plane Command K"
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_pages_dark_mode.webp"
alt="Plane Pages"
width="100%"
/>
</a>
</p>
</p>
<p>
<a href="https://plane.so" target="_blank">
<img
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_commad_k_dark_mode.webp"
alt="Plane Command Menu"
width="100%"
/>
</a>
</p>
</p>


## 📚Documentation

@@ -128,10 +161,10 @@ To see how to Contribute, visit [here](https://github.com/makeplane/plane/blob/m

The Plane community can be found on GitHub Discussions, where you can ask questions, voice ideas, and share your projects.

To chat with other community members you can join the [Plane Discord](https://discord.com/invite/q9HKAdau).
To chat with other community members you can join the [Plane Discord](https://discord.com/invite/A92xrEGCge).

Our [Code of Conduct](https://github.com/makeplane/plane/blob/master/CODE_OF_CONDUCT.md) applies to all Plane community channels.

## ⛓️ Security

If you believe you have found a security vulnerability in Plane, we encourage you to responsibly disclose this and not open a public issue. We will investigate all legitimate reports. Email security@plane.so to disclose any security vulnerabilities.
If you believe you have found a security vulnerability in Plane, we encourage you to responsibly disclose this and not open a public issue. We will investigate all legitimate reports. Email engineering@plane.so to disclose any security vulnerabilities.
@@ -1,28 +0,0 @@

DJANGO_SETTINGS_MODULE="plane.settings.production"
# Database
DATABASE_URL=postgres://plane:xyzzyspoon@db:5432/plane
# Cache
REDIS_URL=redis://redis:6379/
# SMTP
EMAIL_HOST=""
EMAIL_HOST_USER=""
EMAIL_HOST_PASSWORD=""
EMAIL_PORT="587"
EMAIL_USE_TLS="1"
EMAIL_FROM="Team Plane <team@mailer.plane.so>"
# AWS
AWS_REGION=""
AWS_ACCESS_KEY_ID=""
AWS_SECRET_ACCESS_KEY=""
AWS_S3_BUCKET_NAME=""
AWS_S3_ENDPOINT_URL=""
# FE
WEB_URL="localhost/"
# OAUTH
GITHUB_CLIENT_SECRET=""
# Flags
DISABLE_COLLECTSTATIC=1
DOCKERIZED=1
# GPT Envs
OPENAI_API_KEY=0
GPT_ENGINE=0
@@ -7,7 +7,7 @@ ENV PIP_DISABLE_PIP_VERSION_CHECK=1

WORKDIR /code

RUN apk --update --no-cache add \
RUN apk --no-cache add \
    "libpq~=15" \
    "libxslt~=1.1" \
    "nodejs-current~=19" \
@@ -15,8 +15,8 @@ RUN apk --update --no-cache add \

COPY requirements.txt ./
COPY requirements ./requirements
RUN apk add libffi-dev
RUN apk --update --no-cache --virtual .build-deps add \
RUN apk add --no-cache libffi-dev
RUN apk add --no-cache --virtual .build-deps \
    "bash~=5.2" \
    "g++~=12.2" \
    "gcc~=12.2" \
@@ -46,10 +46,10 @@ COPY templates templates/

COPY gunicorn.config.py ./
USER root
RUN apk --update --no-cache add "bash~=5.2"
RUN apk --no-cache add "bash~=5.2"
COPY ./bin ./bin/

RUN chmod +x ./bin/takeoff ./bin/worker
RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
RUN chmod -R 777 /code

USER captain
@@ -1,2 +1,3 @@

web: gunicorn -w 4 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:$PORT --config gunicorn.config.py --max-requests 10000 --max-requests-jitter 1000 --access-logfile -
worker: celery -A plane worker -l info
worker: celery -A plane worker -l info
beat: celery -A plane beat -l INFO
@@ -3,7 +3,15 @@ import uuid

import random
from django.contrib.auth.hashers import make_password
from plane.db.models import ProjectIdentifier
from plane.db.models import Issue, IssueComment, User, Project, ProjectMember, Label
from plane.db.models import (
    Issue,
    IssueComment,
    User,
    Project,
    ProjectMember,
    Label,
    Integration,
)


# Update description and description html values for old descriptions
@@ -174,3 +182,43 @@ def update_label_color():
    except Exception as e:
        print(e)
        print("Failed")


def create_slack_integration():
    try:
        _ = Integration.objects.create(provider="slack", network=2, title="Slack")
        print("Success")
    except Exception as e:
        print(e)
        print("Failed")


def update_integration_verified():
    try:
        integrations = Integration.objects.all()
        updated_integrations = []
        for integration in integrations:
            integration.verified = True
            updated_integrations.append(integration)

        Integration.objects.bulk_update(
            updated_integrations, ["verified"], batch_size=10
        )
        print("Success")
    except Exception as e:
        print(e)
        print("Failed")


def update_start_date():
    try:
        issues = Issue.objects.filter(state__group__in=["started", "completed"])
        updated_issues = []
        for issue in issues:
            issue.start_date = issue.created_at.date()
            updated_issues.append(issue)
        Issue.objects.bulk_update(updated_issues, ["start_date"], batch_size=500)
        print("Success")
    except Exception as e:
        print(e)
        print("Failed")
5
apiserver/bin/beat
Normal file
@@ -0,0 +1,5 @@
#!/bin/bash
set -e

python manage.py wait_for_db
celery -A plane beat -l info

@@ -6,4 +6,4 @@ python manage.py migrate
# Create a Default User
python bin/user_script.py

exec gunicorn -w 8 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --config gunicorn.config.py --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
exec gunicorn -w 8 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
@@ -1,4 +1,4 @@
import os, sys
import os, sys, random, string
import uuid

sys.path.append("/code")
@@ -19,9 +19,9 @@ def populate():
        user = User.objects.create(email=default_email, username=uuid.uuid4().hex)
        user.set_password(default_password)
        user.save()
        print("User created")

    print("Success")
        print(f"User created with an email: {default_email}")
    else:
        print(f"User already exists with the default email: {default_email}")


if __name__ == "__main__":
@@ -1,2 +1,2 @@
from .workspace import WorkSpaceBasePermission, WorkSpaceAdminPermission
from .project import ProjectBasePermission, ProjectEntityPermission, ProjectMemberPermission
from .workspace import WorkSpaceBasePermission, WorkSpaceAdminPermission, WorkspaceEntityPermission, WorkspaceViewerPermission
from .project import ProjectBasePermission, ProjectEntityPermission, ProjectMemberPermission, ProjectLitePermission

@@ -89,3 +89,16 @@ class ProjectEntityPermission(BasePermission):
            role__in=[Admin, Member],
            project_id=view.project_id,
        ).exists()


class ProjectLitePermission(BasePermission):
    def has_permission(self, request, view):
        if request.user.is_anonymous:
            return False

        return ProjectMember.objects.filter(
            workspace__slug=view.workspace_slug,
            member=request.user,
            project_id=view.project_id,
        ).exists()
@@ -5,7 +5,6 @@ from rest_framework.permissions import BasePermission, SAFE_METHODS
from plane.db.models import WorkspaceMember



# Permission Mappings
Owner = 20
Admin = 15
@@ -44,7 +43,6 @@ class WorkSpaceBasePermission(BasePermission):

class WorkSpaceAdminPermission(BasePermission):
    def has_permission(self, request, view):

        if request.user.is_anonymous:
            return False

@@ -53,3 +51,23 @@ class WorkSpaceAdminPermission(BasePermission):
            workspace__slug=view.workspace_slug,
            role__in=[Owner, Admin],
        ).exists()


class WorkspaceEntityPermission(BasePermission):
    def has_permission(self, request, view):
        if request.user.is_anonymous:
            return False

        return WorkspaceMember.objects.filter(
            member=request.user, workspace__slug=view.workspace_slug
        ).exists()


class WorkspaceViewerPermission(BasePermission):
    def has_permission(self, request, view):
        if request.user.is_anonymous:
            return False

        return WorkspaceMember.objects.filter(
            member=request.user, workspace__slug=view.workspace_slug, role__gte=10
        ).exists()
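These new permission classes are standard Django REST Framework `BasePermission` subclasses, so a view opts in through `permission_classes`; since `has_permission()` reads `view.workspace_slug`, the view has to expose that attribute before dispatch. A minimal sketch, assuming a hypothetical endpoint that is not part of this diff:

```python
# Illustrative wiring only; the endpoint below is an assumption, not code
# from this diff.
from rest_framework.response import Response
from rest_framework.views import APIView

from plane.api.permissions import WorkspaceViewerPermission


class WorkspaceOverviewEndpoint(APIView):
    permission_classes = [WorkspaceViewerPermission]

    @property
    def workspace_slug(self):
        # DRF checks permissions before calling the handler, and
        # has_permission() reads view.workspace_slug, so derive it
        # from the URL kwargs captured by the router.
        return self.kwargs.get("slug")

    def get(self, request, slug):
        return Response({"workspace": slug})
```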
@@ -1,10 +1,5 @@
from .base import BaseSerializer
from .people import (
    ChangePasswordSerializer,
    ResetPasswordSerializer,
    TokenSerializer,
)
from .user import UserSerializer, UserLiteSerializer
from .user import UserSerializer, UserLiteSerializer, ChangePasswordSerializer, ResetPasswordSerializer, UserAdminLiteSerializer
from .workspace import (
    WorkSpaceSerializer,
    WorkSpaceMemberSerializer,
@@ -12,6 +7,7 @@ from .workspace import (
    WorkSpaceMemberInviteSerializer,
    WorkspaceLiteSerializer,
    WorkspaceThemeSerializer,
    WorkspaceMemberAdminSerializer,
)
from .project import (
    ProjectSerializer,
@@ -21,17 +17,18 @@ from .project import (
    ProjectIdentifierSerializer,
    ProjectFavoriteSerializer,
    ProjectLiteSerializer,
    ProjectMemberLiteSerializer,
    ProjectDeployBoardSerializer,
    ProjectMemberAdminSerializer,
)
from .state import StateSerializer, StateLiteSerializer
from .shortcut import ShortCutSerializer
from .view import IssueViewSerializer, IssueViewFavoriteSerializer
from .cycle import CycleSerializer, CycleIssueSerializer, CycleFavoriteSerializer
from .cycle import CycleSerializer, CycleIssueSerializer, CycleFavoriteSerializer, CycleWriteSerializer
from .asset import FileAssetSerializer
from .issue import (
    IssueCreateSerializer,
    IssueActivitySerializer,
    IssueCommentSerializer,
    TimeLineIssueSerializer,
    IssuePropertySerializer,
    BlockerIssueSerializer,
    BlockedIssueSerializer,
@@ -43,6 +40,10 @@ from .issue import (
    IssueLinkSerializer,
    IssueLiteSerializer,
    IssueAttachmentSerializer,
    IssueSubscriberSerializer,
    IssueReactionSerializer,
    CommentReactionSerializer,
    IssueVoteSerializer,
)

from .module import (
@@ -62,10 +63,23 @@ from .integration import (
    GithubRepositorySerializer,
    GithubRepositorySyncSerializer,
    GithubCommentSyncSerializer,
    SlackProjectSyncSerializer,
)

from .importer import ImporterSerializer

from .page import PageSerializer, PageBlockSerializer, PageFavoriteSerializer

from .estimate import EstimateSerializer, EstimatePointSerializer, EstimateReadSerializer
from .estimate import (
    EstimateSerializer,
    EstimatePointSerializer,
    EstimateReadSerializer,
)

from .inbox import InboxSerializer, InboxIssueSerializer, IssueStateInboxSerializer

from .analytic import AnalyticViewSerializer

from .notification import NotificationSerializer

from .exporter import ExporterHistorySerializer
30
apiserver/plane/api/serializers/analytic.py
Normal file
@@ -0,0 +1,30 @@
from .base import BaseSerializer
from plane.db.models import AnalyticView
from plane.utils.issue_filters import issue_filters


class AnalyticViewSerializer(BaseSerializer):
    class Meta:
        model = AnalyticView
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "query",
        ]

    def create(self, validated_data):
        query_params = validated_data.get("query_dict", {})
        if bool(query_params):
            validated_data["query"] = issue_filters(query_params, "POST")
        else:
            validated_data["query"] = dict()
        return AnalyticView.objects.create(**validated_data)

    def update(self, instance, validated_data):
        query_params = validated_data.get("query_data", {})
        if bool(query_params):
            validated_data["query"] = issue_filters(query_params, "POST")
        else:
            validated_data["query"] = dict()
        validated_data["query"] = issue_filters(query_params, "PATCH")
        return super().update(instance, validated_data)
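The serializer above derives the stored `query` from a `query_dict` payload via `issue_filters`. A rough usage sketch, assuming a standard DRF flow; the payload values and the `workspace_id` variable are illustrative assumptions, not code from this diff:

```python
# Illustrative only: driving the serializer above from a view.
# "name" / "query_dict" values and workspace_id are placeholders.
serializer = AnalyticViewSerializer(
    data={
        "name": "Open issues by priority",
        "query_dict": {"priority": ["high", "urgent"]},
    }
)
if serializer.is_valid():
    # create() translates query_dict into the stored filter via issue_filters().
    analytic_view = serializer.save(workspace_id=workspace_id)
```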
@@ -1,3 +1,6 @@
# Django imports
from django.db.models.functions import TruncDate

# Third party imports
from rest_framework import serializers

@@ -9,6 +12,12 @@ from .workspace import WorkspaceLiteSerializer
from .project import ProjectLiteSerializer
from plane.db.models import Cycle, CycleIssue, CycleFavorite


class CycleWriteSerializer(BaseSerializer):

    class Meta:
        model = Cycle
        fields = "__all__"


class CycleSerializer(BaseSerializer):
    owned_by = UserLiteSerializer(read_only=True)
@@ -19,9 +28,50 @@ class CycleSerializer(BaseSerializer):
    started_issues = serializers.IntegerField(read_only=True)
    unstarted_issues = serializers.IntegerField(read_only=True)
    backlog_issues = serializers.IntegerField(read_only=True)

    assignees = serializers.SerializerMethodField(read_only=True)
    labels = serializers.SerializerMethodField(read_only=True)
    total_estimates = serializers.IntegerField(read_only=True)
    completed_estimates = serializers.IntegerField(read_only=True)
    started_estimates = serializers.IntegerField(read_only=True)
    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")

    def get_assignees(self, obj):
        members = [
            {
                "avatar": assignee.avatar,
                "first_name": assignee.first_name,
                "display_name": assignee.display_name,
                "id": assignee.id,
            }
            for issue_cycle in obj.issue_cycle.all()
            for assignee in issue_cycle.issue.assignees.all()
        ]
        # Use a set comprehension to return only the unique objects
        unique_objects = {frozenset(item.items()) for item in members}

        # Convert the set back to a list of dictionaries
        unique_list = [dict(item) for item in unique_objects]

        return unique_list

    def get_labels(self, obj):
        labels = [
            {
                "name": label.name,
                "color": label.color,
                "id": label.id,
            }
            for issue_cycle in obj.issue_cycle.all()
            for label in issue_cycle.issue.labels.all()
        ]
        # Use a set comprehension to return only the unique objects
        unique_objects = {frozenset(item.items()) for item in labels}

        # Convert the set back to a list of dictionaries
        unique_list = [dict(item) for item in unique_objects]

        return unique_list

    class Meta:
        model = Cycle
@@ -2,9 +2,13 @@
from .base import BaseSerializer

from plane.db.models import Estimate, EstimatePoint
from plane.api.serializers import WorkspaceLiteSerializer, ProjectLiteSerializer


class EstimateSerializer(BaseSerializer):
    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")

    class Meta:
        model = Estimate
        fields = "__all__"
@@ -27,6 +31,8 @@ class EstimatePointSerializer(BaseSerializer):

class EstimateReadSerializer(BaseSerializer):
    points = EstimatePointSerializer(read_only=True, many=True)
    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")

    class Meta:
        model = Estimate
26
apiserver/plane/api/serializers/exporter.py
Normal file
@@ -0,0 +1,26 @@
# Module imports
from .base import BaseSerializer
from plane.db.models import ExporterHistory
from .user import UserLiteSerializer


class ExporterHistorySerializer(BaseSerializer):
    initiated_by_detail = UserLiteSerializer(source="initiated_by", read_only=True)

    class Meta:
        model = ExporterHistory
        fields = [
            "id",
            "created_at",
            "updated_at",
            "project",
            "provider",
            "status",
            "url",
            "initiated_by",
            "initiated_by_detail",
            "token",
            "created_by",
            "updated_by",
        ]
        read_only_fields = fields
@@ -2,12 +2,14 @@
from .base import BaseSerializer
from .user import UserLiteSerializer
from .project import ProjectLiteSerializer
from .workspace import WorkspaceLiteSerializer
from plane.db.models import Importer


class ImporterSerializer(BaseSerializer):
    initiated_by_detail = UserLiteSerializer(source="initiated_by", read_only=True)
    project_detail = ProjectLiteSerializer(source="project", read_only=True)
    workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)

    class Meta:
        model = Importer
58
apiserver/plane/api/serializers/inbox.py
Normal file
@@ -0,0 +1,58 @@
# Third party frameworks
from rest_framework import serializers

# Module imports
from .base import BaseSerializer
from .issue import IssueFlatSerializer, LabelLiteSerializer
from .project import ProjectLiteSerializer
from .state import StateLiteSerializer
from .project import ProjectLiteSerializer
from .user import UserLiteSerializer
from plane.db.models import Inbox, InboxIssue, Issue


class InboxSerializer(BaseSerializer):
    project_detail = ProjectLiteSerializer(source="project", read_only=True)
    pending_issue_count = serializers.IntegerField(read_only=True)

    class Meta:
        model = Inbox
        fields = "__all__"
        read_only_fields = [
            "project",
            "workspace",
        ]


class InboxIssueSerializer(BaseSerializer):
    issue_detail = IssueFlatSerializer(source="issue", read_only=True)
    project_detail = ProjectLiteSerializer(source="project", read_only=True)

    class Meta:
        model = InboxIssue
        fields = "__all__"
        read_only_fields = [
            "project",
            "workspace",
        ]


class InboxIssueLiteSerializer(BaseSerializer):
    class Meta:
        model = InboxIssue
        fields = ["id", "status", "duplicate_to", "snoozed_till", "source"]
        read_only_fields = fields


class IssueStateInboxSerializer(BaseSerializer):
    state_detail = StateLiteSerializer(read_only=True, source="state")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
    assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
    sub_issues_count = serializers.IntegerField(read_only=True)
    bridge_id = serializers.UUIDField(read_only=True)
    issue_inbox = InboxIssueLiteSerializer(read_only=True, many=True)

    class Meta:
        model = Issue
        fields = "__all__"
@@ -5,3 +5,4 @@ from .github import (
    GithubIssueSyncSerializer,
    GithubCommentSyncSerializer,
)
from .slack import SlackProjectSyncSerializer
14
apiserver/plane/api/serializers/integration/slack.py
Normal file
@@ -0,0 +1,14 @@
# Module imports
from plane.api.serializers import BaseSerializer
from plane.db.models import SlackProjectSync


class SlackProjectSyncSerializer(BaseSerializer):
    class Meta:
        model = SlackProjectSync
        fields = "__all__"
        read_only_fields = [
            "project",
            "workspace",
            "workspace_integration",
        ]
@@ -1,3 +1,6 @@
# Django imports
from django.utils import timezone

# Third Party imports
from rest_framework import serializers

@@ -13,10 +16,10 @@ from plane.db.models import (
    Issue,
    IssueActivity,
    IssueComment,
    TimelineIssue,
    IssueProperty,
    IssueBlocker,
    IssueAssignee,
    IssueSubscriber,
    IssueLabel,
    Label,
    IssueBlocker,
@@ -26,6 +29,9 @@ from plane.db.models import (
    ModuleIssue,
    IssueLink,
    IssueAttachment,
    IssueReaction,
    CommentReaction,
    IssueVote,
)


@@ -38,6 +44,7 @@ class IssueFlatSerializer(BaseSerializer):
            "id",
            "name",
            "description",
            "description_html",
            "priority",
            "start_date",
            "target_date",
@@ -46,6 +53,20 @@ class IssueFlatSerializer(BaseSerializer):
        ]


class IssueProjectLiteSerializer(BaseSerializer):
    project_detail = ProjectLiteSerializer(source="project", read_only=True)

    class Meta:
        model = Issue
        fields = [
            "id",
            "project_detail",
            "name",
            "sequence_id",
        ]
        read_only_fields = fields


##TODO: Find a better way to write this serializer
## Find a better approach to save manytomany?
class IssueCreateSerializer(BaseSerializer):
@@ -91,14 +112,26 @@ class IssueCreateSerializer(BaseSerializer):
            "updated_at",
        ]

    def validate(self, data):
        if data.get("start_date", None) is not None and data.get("target_date", None) is not None and data.get("start_date", None) > data.get("target_date", None):
            raise serializers.ValidationError("Start date cannot exceed target date")
        return data

    def create(self, validated_data):
        blockers = validated_data.pop("blockers_list", None)
        assignees = validated_data.pop("assignees_list", None)
        labels = validated_data.pop("labels_list", None)
        blocks = validated_data.pop("blocks_list", None)

        project = self.context["project"]
        issue = Issue.objects.create(**validated_data, project=project)
        project_id = self.context["project_id"]
        workspace_id = self.context["workspace_id"]
        default_assignee_id = self.context["default_assignee_id"]

        issue = Issue.objects.create(**validated_data, project_id=project_id)

        # Issue Audit Users
        created_by_id = issue.created_by_id
        updated_by_id = issue.updated_by_id

        if blockers is not None and len(blockers):
            IssueBlocker.objects.bulk_create(
@@ -106,10 +139,10 @@ class IssueCreateSerializer(BaseSerializer):
                    IssueBlocker(
                        block=issue,
                        blocked_by=blocker,
                        project=project,
                        workspace=project.workspace,
                        created_by=issue.created_by,
                        updated_by=issue.updated_by,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for blocker in blockers
                ],
@@ -122,10 +155,10 @@ class IssueCreateSerializer(BaseSerializer):
                    IssueAssignee(
                        assignee=user,
                        issue=issue,
                        project=project,
                        workspace=project.workspace,
                        created_by=issue.created_by,
                        updated_by=issue.updated_by,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for user in assignees
                ],
@@ -133,14 +166,14 @@ class IssueCreateSerializer(BaseSerializer):
            )
        else:
            # Then assign it to default assignee
            if project.default_assignee is not None:
            if default_assignee_id is not None:
                IssueAssignee.objects.create(
                    assignee=project.default_assignee,
                    assignee_id=default_assignee_id,
                    issue=issue,
                    project=project,
                    workspace=project.workspace,
                    created_by=issue.created_by,
                    updated_by=issue.updated_by,
                    project_id=project_id,
                    workspace_id=workspace_id,
                    created_by_id=created_by_id,
                    updated_by_id=updated_by_id,
                )

        if labels is not None and len(labels):
@@ -149,10 +182,10 @@ class IssueCreateSerializer(BaseSerializer):
                    IssueLabel(
                        label=label,
                        issue=issue,
                        project=project,
                        workspace=project.workspace,
                        created_by=issue.created_by,
                        updated_by=issue.updated_by,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for label in labels
                ],
@@ -165,10 +198,10 @@ class IssueCreateSerializer(BaseSerializer):
                    IssueBlocker(
                        block=block,
                        blocked_by=issue,
                        project=project,
                        workspace=project.workspace,
                        created_by=issue.created_by,
                        updated_by=issue.updated_by,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for block in blocks
                ],
@@ -183,6 +216,12 @@ class IssueCreateSerializer(BaseSerializer):
        labels = validated_data.pop("labels_list", None)
        blocks = validated_data.pop("blocks_list", None)

        # Related models
        project_id = instance.project_id
        workspace_id = instance.workspace_id
        created_by_id = instance.created_by_id
        updated_by_id = instance.updated_by_id

        if blockers is not None:
            IssueBlocker.objects.filter(block=instance).delete()
            IssueBlocker.objects.bulk_create(
@@ -190,10 +229,10 @@ class IssueCreateSerializer(BaseSerializer):
                    IssueBlocker(
                        block=instance,
                        blocked_by=blocker,
                        project=instance.project,
                        workspace=instance.project.workspace,
                        created_by=instance.created_by,
                        updated_by=instance.updated_by,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for blocker in blockers
                ],
@@ -207,10 +246,10 @@ class IssueCreateSerializer(BaseSerializer):
                    IssueAssignee(
                        assignee=user,
                        issue=instance,
                        project=instance.project,
                        workspace=instance.project.workspace,
                        created_by=instance.created_by,
                        updated_by=instance.updated_by,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for user in assignees
                ],
@@ -224,10 +263,10 @@ class IssueCreateSerializer(BaseSerializer):
                    IssueLabel(
                        label=label,
                        issue=instance,
                        project=instance.project,
                        workspace=instance.project.workspace,
                        created_by=instance.created_by,
                        updated_by=instance.updated_by,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for label in labels
                ],
@@ -241,22 +280,25 @@ class IssueCreateSerializer(BaseSerializer):
                    IssueBlocker(
                        block=block,
                        blocked_by=instance,
                        project=instance.project,
                        workspace=instance.project.workspace,
                        created_by=instance.created_by,
                        updated_by=instance.updated_by,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for block in blocks
                ],
                batch_size=10,
            )

        # Time updation occues even when other related models are updated
        instance.updated_at = timezone.now()
        return super().update(instance, validated_data)


class IssueActivitySerializer(BaseSerializer):
    actor_detail = UserLiteSerializer(read_only=True, source="actor")
    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
    issue_detail = IssueFlatSerializer(read_only=True, source="issue")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")

    class Meta:
        model = IssueActivity
@@ -283,21 +325,6 @@ class IssueCommentSerializer(BaseSerializer):
        ]


class TimeLineIssueSerializer(BaseSerializer):
    class Meta:
        model = TimelineIssue
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "issue",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]


class IssuePropertySerializer(BaseSerializer):
    class Meta:
        model = IssueProperty
@@ -345,19 +372,31 @@ class IssueLabelSerializer(BaseSerializer):


class BlockedIssueSerializer(BaseSerializer):
    blocked_issue_detail = IssueFlatSerializer(source="block", read_only=True)
    blocked_issue_detail = IssueProjectLiteSerializer(source="block", read_only=True)

    class Meta:
        model = IssueBlocker
        fields = "__all__"
        fields = [
            "blocked_issue_detail",
            "blocked_by",
            "block",
        ]
        read_only_fields = fields


class BlockerIssueSerializer(BaseSerializer):
    blocker_issue_detail = IssueFlatSerializer(source="blocked_by", read_only=True)
    blocker_issue_detail = IssueProjectLiteSerializer(
        source="blocked_by", read_only=True
    )

    class Meta:
        model = IssueBlocker
        fields = "__all__"
        fields = [
            "blocker_issue_detail",
            "blocked_by",
            "block",
        ]
        read_only_fields = fields


class IssueAssigneeSerializer(BaseSerializer):
@@ -470,14 +509,108 @@ class IssueAttachmentSerializer(BaseSerializer):
        ]


class IssueReactionSerializer(BaseSerializer):
    class Meta:
        model = IssueReaction
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "issue",
            "actor",
        ]


class IssueReactionLiteSerializer(BaseSerializer):
    actor_detail = UserLiteSerializer(read_only=True, source="actor")

    class Meta:
        model = IssueReaction
        fields = [
            "id",
            "reaction",
            "issue",
            "actor_detail",
        ]


class CommentReactionLiteSerializer(BaseSerializer):
    actor_detail = UserLiteSerializer(read_only=True, source="actor")

    class Meta:
        model = CommentReaction
        fields = [
            "id",
            "reaction",
            "comment",
            "actor_detail",
        ]


class CommentReactionSerializer(BaseSerializer):
    class Meta:
        model = CommentReaction
        fields = "__all__"
        read_only_fields = ["workspace", "project", "comment", "actor"]


class IssueVoteSerializer(BaseSerializer):

    class Meta:
        model = IssueVote
        fields = ["issue", "vote", "workspace_id", "project_id", "actor"]
        read_only_fields = fields


class IssueCommentSerializer(BaseSerializer):
    actor_detail = UserLiteSerializer(read_only=True, source="actor")
    issue_detail = IssueFlatSerializer(read_only=True, source="issue")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
    comment_reactions = CommentReactionLiteSerializer(read_only=True, many=True)

    class Meta:
        model = IssueComment
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "issue",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
            "access",
        ]


class IssueStateFlatSerializer(BaseSerializer):
    state_detail = StateLiteSerializer(read_only=True, source="state")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")

    class Meta:
        model = Issue
        fields = [
            "id",
            "sequence_id",
            "name",
            "state_detail",
            "project_detail",
        ]


# Issue Serializer with state details
class IssueStateSerializer(BaseSerializer):
    state_detail = StateSerializer(read_only=True, source="state")
    project_detail = ProjectSerializer(read_only=True, source="project")
    label_details = LabelSerializer(read_only=True, source="labels", many=True)
    label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
    state_detail = StateLiteSerializer(read_only=True, source="state")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
    sub_issues_count = serializers.IntegerField(read_only=True)
    bridge_id = serializers.UUIDField(read_only=True)
    attachment_count = serializers.IntegerField(read_only=True)
    link_count = serializers.IntegerField(read_only=True)

    class Meta:
        model = Issue
@@ -485,9 +618,9 @@ class IssueStateSerializer(BaseSerializer):


class IssueSerializer(BaseSerializer):
    project_detail = ProjectSerializer(read_only=True, source="project")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    state_detail = StateSerializer(read_only=True, source="state")
    parent_detail = IssueFlatSerializer(read_only=True, source="parent")
    parent_detail = IssueStateFlatSerializer(read_only=True, source="parent")
    label_details = LabelSerializer(read_only=True, source="labels", many=True)
    assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
    # List of issues blocked by this issue
@@ -499,6 +632,7 @@ class IssueSerializer(BaseSerializer):
    issue_link = IssueLinkSerializer(read_only=True, many=True)
|
||||
issue_attachment = IssueAttachmentSerializer(read_only=True, many=True)
|
||||
sub_issues_count = serializers.IntegerField(read_only=True)
|
||||
issue_reactions = IssueReactionLiteSerializer(read_only=True, many=True)
|
||||
|
||||
class Meta:
|
||||
model = Issue
|
||||
@@ -524,6 +658,7 @@ class IssueLiteSerializer(BaseSerializer):
|
||||
module_id = serializers.UUIDField(read_only=True)
|
||||
attachment_count = serializers.IntegerField(read_only=True)
|
||||
link_count = serializers.IntegerField(read_only=True)
|
||||
issue_reactions = IssueReactionLiteSerializer(read_only=True, many=True)
|
||||
|
||||
class Meta:
|
||||
model = Issue
|
||||
@@ -539,3 +674,14 @@ class IssueLiteSerializer(BaseSerializer):
|
||||
"created_at",
|
||||
"updated_at",
|
||||
]
|
||||
|
||||
|
||||
class IssueSubscriberSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = IssueSubscriber
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"project",
|
||||
"issue",
|
||||
]
|
||||
|
||||
@@ -106,7 +106,7 @@ class ModuleFlatSerializer(BaseSerializer):
|
||||
|
||||
class ModuleIssueSerializer(BaseSerializer):
|
||||
module_detail = ModuleFlatSerializer(read_only=True, source="module")
|
||||
issue_detail = IssueStateSerializer(read_only=True, source="issue")
|
||||
issue_detail = ProjectLiteSerializer(read_only=True, source="issue")
|
||||
sub_issues_count = serializers.IntegerField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
@@ -151,7 +151,7 @@ class ModuleLinkSerializer(BaseSerializer):
|
||||
|
||||
|
||||
class ModuleSerializer(BaseSerializer):
|
||||
project_detail = ProjectSerializer(read_only=True, source="project")
|
||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||
lead_detail = UserLiteSerializer(read_only=True, source="lead")
|
||||
members_detail = UserLiteSerializer(read_only=True, many=True, source="members")
|
||||
link_module = ModuleLinkSerializer(read_only=True, many=True)
|
||||
|
||||
12
apiserver/plane/api/serializers/notification.py
Normal file
@@ -0,0 +1,12 @@
# Module imports
from .base import BaseSerializer
from .user import UserLiteSerializer
from plane.db.models import Notification


class NotificationSerializer(BaseSerializer):
    triggered_by_details = UserLiteSerializer(read_only=True, source="triggered_by")

    class Meta:
        model = Notification
        fields = "__all__"
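A quick usage sketch for the new serializer; the lookup fields below are assumptions, since only the serializer itself appears in this diff:

# Hypothetical usage of the new NotificationSerializer; the "receiver" and
# "read_at" field names are assumptions, not taken from this diff.
from plane.api.serializers.notification import NotificationSerializer
from plane.db.models import Notification


def unread_payload(user):
    queryset = Notification.objects.filter(receiver=user, read_at__isnull=True)
    return NotificationSerializer(queryset, many=True).data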
@@ -3,7 +3,7 @@ from rest_framework import serializers
|
||||
|
||||
# Module imports
|
||||
from .base import BaseSerializer
|
||||
from .issue import IssueFlatSerializer, LabelSerializer
|
||||
from .issue import IssueFlatSerializer, LabelLiteSerializer
|
||||
from .workspace import WorkspaceLiteSerializer
|
||||
from .project import ProjectLiteSerializer
|
||||
from plane.db.models import Page, PageBlock, PageFavorite, PageLabel, Label
|
||||
@@ -23,16 +23,22 @@ class PageBlockSerializer(BaseSerializer):
|
||||
"page",
|
||||
]
|
||||
|
||||
class PageBlockLiteSerializer(BaseSerializer):
|
||||
|
||||
class Meta:
|
||||
model = PageBlock
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class PageSerializer(BaseSerializer):
|
||||
is_favorite = serializers.BooleanField(read_only=True)
|
||||
label_details = LabelSerializer(read_only=True, source="labels", many=True)
|
||||
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
|
||||
labels_list = serializers.ListField(
|
||||
child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
|
||||
write_only=True,
|
||||
required=False,
|
||||
)
|
||||
blocks = PageBlockSerializer(read_only=True, many=True)
|
||||
blocks = PageBlockLiteSerializer(read_only=True, many=True)
|
||||
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
||||
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
|
||||
|
||||
|
||||
@@ -1,57 +0,0 @@
|
||||
from rest_framework.serializers import (
|
||||
ModelSerializer,
|
||||
Serializer,
|
||||
CharField,
|
||||
SerializerMethodField,
|
||||
)
|
||||
from rest_framework.authtoken.models import Token
|
||||
from rest_framework_simplejwt.tokens import RefreshToken
|
||||
|
||||
|
||||
from plane.db.models import User
|
||||
|
||||
|
||||
class UserSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = User
|
||||
fields = "__all__"
|
||||
extra_kwargs = {"password": {"write_only": True}}
|
||||
|
||||
|
||||
class ChangePasswordSerializer(Serializer):
|
||||
model = User
|
||||
|
||||
"""
|
||||
Serializer for password change endpoint.
|
||||
"""
|
||||
old_password = CharField(required=True)
|
||||
new_password = CharField(required=True)
|
||||
|
||||
|
||||
class ResetPasswordSerializer(Serializer):
|
||||
model = User
|
||||
|
||||
"""
|
||||
Serializer for password change endpoint.
|
||||
"""
|
||||
new_password = CharField(required=True)
|
||||
confirm_password = CharField(required=True)
|
||||
|
||||
|
||||
class TokenSerializer(ModelSerializer):
|
||||
|
||||
user = UserSerializer()
|
||||
access_token = SerializerMethodField()
|
||||
refresh_token = SerializerMethodField()
|
||||
|
||||
def get_access_token(self, obj):
|
||||
refresh_token = RefreshToken.for_user(obj.user)
|
||||
return str(refresh_token.access_token)
|
||||
|
||||
def get_refresh_token(self, obj):
|
||||
refresh_token = RefreshToken.for_user(obj.user)
|
||||
return str(refresh_token)
|
||||
|
||||
class Meta:
|
||||
model = Token
|
||||
fields = "__all__"
|
||||
@@ -7,13 +7,14 @@ from rest_framework import serializers
|
||||
# Module imports
|
||||
from .base import BaseSerializer
|
||||
from plane.api.serializers.workspace import WorkSpaceSerializer, WorkspaceLiteSerializer
|
||||
from plane.api.serializers.user import UserLiteSerializer
|
||||
from plane.api.serializers.user import UserLiteSerializer, UserAdminLiteSerializer
|
||||
from plane.db.models import (
|
||||
Project,
|
||||
ProjectMember,
|
||||
ProjectMemberInvite,
|
||||
ProjectIdentifier,
|
||||
ProjectFavorite,
|
||||
ProjectDeployBoard,
|
||||
)
|
||||
|
||||
|
||||
@@ -77,11 +78,33 @@ class ProjectSerializer(BaseSerializer):
|
||||
raise serializers.ValidationError(detail="Project Identifier is already taken")
|
||||
|
||||
|
||||
class ProjectLiteSerializer(BaseSerializer):
    class Meta:
        model = Project
        fields = [
            "id",
            "identifier",
            "name",
            "cover_image",
            "icon_prop",
            "emoji",
            "description",
        ]
        read_only_fields = fields
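The lite serializers introduced throughout this change expose the same model through a trimmed, read-only field list so nested payloads stay small. A generic sketch of the pattern (Book is illustrative only, not part of Plane):

# Generic "full vs. lite" serializer pattern; Book is illustrative only.
from django.db import models
from rest_framework import serializers


class Book(models.Model):
    title = models.CharField(max_length=255)
    summary = models.TextField()


class BookSerializer(serializers.ModelSerializer):
    class Meta:
        model = Book
        fields = "__all__"            # full payload for detail views


class BookLiteSerializer(serializers.ModelSerializer):
    class Meta:
        model = Book
        fields = ["id", "title"]      # just enough to render a reference
        read_only_fields = fields     # nested details are never writable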
|
||||
|
||||
class ProjectDetailSerializer(BaseSerializer):
|
||||
workspace = WorkSpaceSerializer(read_only=True)
|
||||
default_assignee = UserLiteSerializer(read_only=True)
|
||||
project_lead = UserLiteSerializer(read_only=True)
|
||||
is_favorite = serializers.BooleanField(read_only=True)
|
||||
total_members = serializers.IntegerField(read_only=True)
|
||||
total_cycles = serializers.IntegerField(read_only=True)
|
||||
total_modules = serializers.IntegerField(read_only=True)
|
||||
is_member = serializers.BooleanField(read_only=True)
|
||||
sort_order = serializers.FloatField(read_only=True)
|
||||
member_role = serializers.IntegerField(read_only=True)
|
||||
is_deployed = serializers.BooleanField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Project
|
||||
@@ -90,7 +113,7 @@ class ProjectDetailSerializer(BaseSerializer):
|
||||
|
||||
class ProjectMemberSerializer(BaseSerializer):
|
||||
workspace = WorkSpaceSerializer(read_only=True)
|
||||
project = ProjectSerializer(read_only=True)
|
||||
project = ProjectLiteSerializer(read_only=True)
|
||||
member = UserLiteSerializer(read_only=True)
|
||||
|
||||
class Meta:
|
||||
@@ -98,9 +121,19 @@ class ProjectMemberSerializer(BaseSerializer):
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class ProjectMemberAdminSerializer(BaseSerializer):
|
||||
workspace = WorkspaceLiteSerializer(read_only=True)
|
||||
project = ProjectLiteSerializer(read_only=True)
|
||||
member = UserAdminLiteSerializer(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = ProjectMember
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class ProjectMemberInviteSerializer(BaseSerializer):
|
||||
project = ProjectSerializer(read_only=True)
|
||||
workspace = WorkSpaceSerializer(read_only=True)
|
||||
project = ProjectLiteSerializer(read_only=True)
|
||||
workspace = WorkspaceLiteSerializer(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = ProjectMemberInvite
|
||||
@@ -114,7 +147,7 @@ class ProjectIdentifierSerializer(BaseSerializer):
|
||||
|
||||
|
||||
class ProjectFavoriteSerializer(BaseSerializer):
|
||||
project_detail = ProjectSerializer(source="project", read_only=True)
|
||||
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = ProjectFavorite
|
||||
@@ -125,8 +158,24 @@ class ProjectFavoriteSerializer(BaseSerializer):
|
||||
]
|
||||
|
||||
|
||||
class ProjectLiteSerializer(BaseSerializer):
|
||||
class ProjectMemberLiteSerializer(BaseSerializer):
|
||||
member = UserLiteSerializer(read_only=True)
|
||||
is_subscribed = serializers.BooleanField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Project
|
||||
fields = ["id", "identifier", "name"]
|
||||
model = ProjectMember
|
||||
fields = ["member", "id", "is_subscribed"]
|
||||
read_only_fields = fields
|
||||
|
||||
|
||||
class ProjectDeployBoardSerializer(BaseSerializer):
|
||||
project_details = ProjectLiteSerializer(read_only=True, source="project")
|
||||
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
|
||||
|
||||
class Meta:
|
||||
model = ProjectDeployBoard
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"project" "anchor",
|
||||
]
|
||||
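The fix above matters because Python concatenates adjacent string literals, so the original list silently contained a single bogus entry:

# Adjacent string literals concatenate, which is why the original
# read_only_fields entry was one malformed field rather than two.
assert ["workspace", "project" "anchor"] == ["workspace", "projectanchor"]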
|
||||
@@ -1,14 +0,0 @@
|
||||
# Module imports
|
||||
from .base import BaseSerializer
|
||||
|
||||
from plane.db.models import Shortcut
|
||||
|
||||
|
||||
class ShortCutSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = Shortcut
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"project",
|
||||
]
|
||||
@@ -1,3 +1,6 @@
|
||||
# Third party imports
|
||||
from rest_framework import serializers
|
||||
|
||||
# Module imports
|
||||
from .base import BaseSerializer
|
||||
from plane.db.models import User
|
||||
@@ -25,6 +28,10 @@ class UserSerializer(BaseSerializer):
|
||||
]
|
||||
extra_kwargs = {"password": {"write_only": True}}
|
||||
|
||||
# If the user has already filled in a first or last name, they are considered onboarded
|
||||
def get_is_onboarded(self, obj):
|
||||
return bool(obj.first_name) or bool(obj.last_name)
|
||||
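get_is_onboarded above backs a SerializerMethodField on the user serializer; a minimal pairing looks like this (the field list is illustrative):

# Minimal SerializerMethodField pairing for get_is_onboarded;
# the field list here is illustrative, not Plane's full serializer.
from rest_framework import serializers
from plane.db.models import User


class UserSerializer(serializers.ModelSerializer):
    is_onboarded = serializers.SerializerMethodField()

    class Meta:
        model = User
        fields = ["id", "first_name", "last_name", "is_onboarded"]

    def get_is_onboarded(self, obj):
        return bool(obj.first_name) or bool(obj.last_name)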
|
||||
|
||||
class UserLiteSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
@@ -33,11 +40,50 @@ class UserLiteSerializer(BaseSerializer):
|
||||
"id",
|
||||
"first_name",
|
||||
"last_name",
|
||||
"email",
|
||||
"avatar",
|
||||
"is_bot",
|
||||
"display_name",
|
||||
]
|
||||
read_only_fields = [
|
||||
"id",
|
||||
"is_bot",
|
||||
]
|
||||
|
||||
|
||||
class UserAdminLiteSerializer(BaseSerializer):
|
||||
|
||||
class Meta:
|
||||
model = User
|
||||
fields = [
|
||||
"id",
|
||||
"first_name",
|
||||
"last_name",
|
||||
"avatar",
|
||||
"is_bot",
|
||||
"display_name",
|
||||
"email",
|
||||
]
|
||||
read_only_fields = [
|
||||
"id",
|
||||
"is_bot",
|
||||
]
|
||||
|
||||
|
||||
class ChangePasswordSerializer(serializers.Serializer):
|
||||
model = User
|
||||
|
||||
"""
|
||||
Serializer for password change endpoint.
|
||||
"""
|
||||
old_password = serializers.CharField(required=True)
|
||||
new_password = serializers.CharField(required=True)
|
||||
|
||||
|
||||
class ResetPasswordSerializer(serializers.Serializer):
|
||||
model = User
|
||||
|
||||
"""
|
||||
Serializer for the password reset endpoint.
|
||||
"""
|
||||
new_password = serializers.CharField(required=True)
|
||||
confirm_password = serializers.CharField(required=True)
|
||||
|
||||
@@ -3,7 +3,7 @@ from rest_framework import serializers
|
||||
|
||||
# Module imports
|
||||
from .base import BaseSerializer
|
||||
from .user import UserLiteSerializer
|
||||
from .user import UserLiteSerializer, UserAdminLiteSerializer
|
||||
|
||||
from plane.db.models import (
|
||||
User,
|
||||
@@ -19,6 +19,7 @@ from plane.db.models import (
|
||||
class WorkSpaceSerializer(BaseSerializer):
|
||||
owner = UserLiteSerializer(read_only=True)
|
||||
total_members = serializers.IntegerField(read_only=True)
|
||||
total_issues = serializers.IntegerField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Workspace
|
||||
@@ -32,10 +33,30 @@ class WorkSpaceSerializer(BaseSerializer):
|
||||
"owner",
|
||||
]
|
||||
|
||||
class WorkspaceLiteSerializer(BaseSerializer):
    class Meta:
        model = Workspace
        fields = [
            "name",
            "slug",
            "id",
        ]
        read_only_fields = fields
|
||||
|
||||
|
||||
class WorkSpaceMemberSerializer(BaseSerializer):
|
||||
member = UserLiteSerializer(read_only=True)
|
||||
workspace = WorkSpaceSerializer(read_only=True)
|
||||
workspace = WorkspaceLiteSerializer(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = WorkspaceMember
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class WorkspaceMemberAdminSerializer(BaseSerializer):
|
||||
member = UserAdminLiteSerializer(read_only=True)
|
||||
workspace = WorkspaceLiteSerializer(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = WorkspaceMember
|
||||
@@ -44,6 +65,8 @@ class WorkSpaceMemberSerializer(BaseSerializer):
|
||||
|
||||
class WorkSpaceMemberInviteSerializer(BaseSerializer):
|
||||
workspace = WorkSpaceSerializer(read_only=True)
|
||||
total_members = serializers.IntegerField(read_only=True)
|
||||
created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
|
||||
|
||||
class Meta:
|
||||
model = WorkspaceMemberInvite
|
||||
@@ -98,17 +121,6 @@ class TeamSerializer(BaseSerializer):
|
||||
return super().update(instance, validated_data)
|
||||
|
||||
|
||||
class WorkspaceLiteSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = Workspace
|
||||
fields = [
|
||||
"name",
|
||||
"slug",
|
||||
"id",
|
||||
]
|
||||
read_only_fields = fields
|
||||
|
||||
|
||||
class WorkspaceThemeSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = WorkspaceTheme
|
||||
|
||||
@@ -5,6 +5,7 @@ from django.urls import path
|
||||
|
||||
from plane.api.views import (
|
||||
# Authentication
|
||||
SignUpEndpoint,
|
||||
SignInEndpoint,
|
||||
SignOutEndpoint,
|
||||
MagicSignInEndpoint,
|
||||
@@ -21,6 +22,7 @@ from plane.api.views import (
|
||||
# User
|
||||
UserEndpoint,
|
||||
UpdateUserOnBoardedEndpoint,
|
||||
UpdateUserTourCompletedEndpoint,
|
||||
UserActivityEndpoint,
|
||||
## End User
|
||||
# Workspaces
|
||||
@@ -30,6 +32,7 @@ from plane.api.views import (
|
||||
InviteWorkspaceEndpoint,
|
||||
JoinWorkspaceEndpoint,
|
||||
WorkSpaceMemberViewSet,
|
||||
WorkspaceMembersEndpoint,
|
||||
WorkspaceInvitationsViewset,
|
||||
UserWorkspaceInvitationsEndpoint,
|
||||
WorkspaceMemberUserEndpoint,
|
||||
@@ -43,6 +46,11 @@ from plane.api.views import (
|
||||
UserIssueCompletedGraphEndpoint,
|
||||
UserWorkspaceDashboardEndpoint,
|
||||
WorkspaceThemeViewSet,
|
||||
WorkspaceUserProfileStatsEndpoint,
|
||||
WorkspaceUserActivityEndpoint,
|
||||
WorkspaceUserProfileEndpoint,
|
||||
WorkspaceUserProfileIssuesEndpoint,
|
||||
WorkspaceLabelsEndpoint,
|
||||
## End Workspaces
|
||||
# File Assets
|
||||
FileAssetEndpoint,
|
||||
@@ -52,6 +60,7 @@ from plane.api.views import (
|
||||
ProjectViewSet,
|
||||
InviteProjectEndpoint,
|
||||
ProjectMemberViewSet,
|
||||
ProjectMemberEndpoint,
|
||||
ProjectMemberInvitationsViewset,
|
||||
ProjectMemberUserEndpoint,
|
||||
AddMemberToProjectEndpoint,
|
||||
@@ -69,25 +78,26 @@ from plane.api.views import (
|
||||
BulkDeleteIssuesEndpoint,
|
||||
BulkImportIssuesEndpoint,
|
||||
ProjectUserViewsEndpoint,
|
||||
TimeLineIssueViewSet,
|
||||
IssuePropertyViewSet,
|
||||
LabelViewSet,
|
||||
SubIssuesEndpoint,
|
||||
IssueLinkViewSet,
|
||||
BulkCreateIssueLabelsEndpoint,
|
||||
IssueAttachmentEndpoint,
|
||||
IssueArchiveViewSet,
|
||||
IssueSubscriberViewSet,
|
||||
IssueCommentPublicViewSet,
|
||||
IssueReactionViewSet,
|
||||
CommentReactionViewSet,
|
||||
ExportIssuesEndpoint,
|
||||
## End Issues
|
||||
# States
|
||||
StateViewSet,
|
||||
StateDeleteIssueCheckEndpoint,
|
||||
## End States
|
||||
# Estimates
|
||||
ProjectEstimatePointEndpoint,
|
||||
BulkEstimatePointEndpoint,
|
||||
## End Estimates
|
||||
# Shortcuts
|
||||
ShortCutViewSet,
|
||||
## End Shortcuts
|
||||
# Views
|
||||
IssueViewViewSet,
|
||||
ViewIssuesEndpoint,
|
||||
@@ -97,12 +107,8 @@ from plane.api.views import (
|
||||
CycleViewSet,
|
||||
CycleIssueViewSet,
|
||||
CycleDateCheckEndpoint,
|
||||
CurrentUpcomingCyclesEndpoint,
|
||||
CompletedCyclesEndpoint,
|
||||
CycleFavoriteViewSet,
|
||||
DraftCyclesEndpoint,
|
||||
TransferCycleIssueEndpoint,
|
||||
InCompleteCyclesEndpoint,
|
||||
## End Cycles
|
||||
# Modules
|
||||
ModuleViewSet,
|
||||
@@ -116,10 +122,6 @@ from plane.api.views import (
|
||||
PageBlockViewSet,
|
||||
PageFavoriteViewSet,
|
||||
CreateIssueFromPageBlockEndpoint,
|
||||
RecentPagesEndpoint,
|
||||
FavoritePagesEndpoint,
|
||||
MyPagesEndpoint,
|
||||
CreatedbyOtherPagesEndpoint,
|
||||
## End Pages
|
||||
# Api Tokens
|
||||
ApiTokenEndpoint,
|
||||
@@ -132,6 +134,7 @@ from plane.api.views import (
|
||||
GithubIssueSyncViewSet,
|
||||
GithubCommentSyncViewSet,
|
||||
BulkCreateGithubIssueSyncEndpoint,
|
||||
SlackProjectSyncViewSet,
|
||||
## End Integrations
|
||||
# Importer
|
||||
ServiceIssueImportSummaryEndpoint,
|
||||
@@ -148,6 +151,30 @@ from plane.api.views import (
|
||||
# Release Notes
|
||||
ReleaseNotesEndpoint,
|
||||
## End Release Notes
|
||||
# Inbox
|
||||
InboxViewSet,
|
||||
InboxIssueViewSet,
|
||||
## End Inbox
|
||||
# Analytics
|
||||
AnalyticsEndpoint,
|
||||
AnalyticViewViewset,
|
||||
SavedAnalyticEndpoint,
|
||||
ExportAnalyticsEndpoint,
|
||||
DefaultAnalyticsEndpoint,
|
||||
## End Analytics
|
||||
# Notification
|
||||
NotificationViewSet,
|
||||
UnreadNotificationEndpoint,
|
||||
## End Notification
|
||||
# Public Boards
|
||||
ProjectDeployBoardViewSet,
|
||||
ProjectDeployBoardIssuesPublicEndpoint,
|
||||
ProjectDeployBoardPublicSettingsEndpoint,
|
||||
IssueReactionPublicViewSet,
|
||||
CommentReactionPublicViewSet,
|
||||
InboxIssuePublicViewSet,
|
||||
IssueVotePublicViewSet,
|
||||
## End Public Boards
|
||||
)
|
||||
|
||||
|
||||
@@ -155,6 +182,7 @@ urlpatterns = [
|
||||
# Social Auth
|
||||
path("social-auth/", OauthEndpoint.as_view(), name="oauth"),
|
||||
# Auth
|
||||
path("sign-up/", SignUpEndpoint.as_view(), name="sign-up"),
|
||||
path("sign-in/", SignInEndpoint.as_view(), name="sign-in"),
|
||||
path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"),
|
||||
# Magic Sign In/Up
|
||||
@@ -171,7 +199,7 @@ urlpatterns = [
|
||||
),
|
||||
# Password Manipulation
|
||||
path(
|
||||
"password-reset/<uidb64>/<token>/",
|
||||
"reset-password/<uidb64>/<token>/",
|
||||
ResetPasswordEndpoint.as_view(),
|
||||
name="password-reset",
|
||||
),
|
||||
@@ -196,7 +224,12 @@ urlpatterns = [
|
||||
path(
|
||||
"users/me/onboard/",
|
||||
UpdateUserOnBoardedEndpoint.as_view(),
|
||||
name="change-password",
|
||||
name="user-onboard",
|
||||
),
|
||||
path(
|
||||
"users/me/tour-completed/",
|
||||
UpdateUserTourCompletedEndpoint.as_view(),
|
||||
name="user-tour",
|
||||
),
|
||||
path("users/activities/", UserActivityEndpoint.as_view(), name="user-activities"),
|
||||
# user workspaces
|
||||
@@ -294,7 +327,6 @@ urlpatterns = [
|
||||
{
|
||||
"delete": "destroy",
|
||||
"get": "retrieve",
|
||||
"get": "retrieve",
|
||||
}
|
||||
),
|
||||
name="workspace",
|
||||
@@ -308,7 +340,6 @@ urlpatterns = [
|
||||
"workspaces/<str:slug>/members/<uuid:pk>/",
|
||||
WorkSpaceMemberViewSet.as_view(
|
||||
{
|
||||
"put": "update",
|
||||
"patch": "partial_update",
|
||||
"delete": "destroy",
|
||||
"get": "retrieve",
|
||||
@@ -316,6 +347,11 @@ urlpatterns = [
|
||||
),
|
||||
name="workspace",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/workspace-members/",
|
||||
WorkspaceMembersEndpoint.as_view(),
|
||||
name="workspace-members",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/teams/",
|
||||
TeamMemberViewSet.as_view(
|
||||
@@ -374,6 +410,31 @@ urlpatterns = [
|
||||
),
|
||||
name="workspace-themes",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/user-stats/<uuid:user_id>/",
|
||||
WorkspaceUserProfileStatsEndpoint.as_view(),
|
||||
name="workspace-user-stats",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/user-activity/<uuid:user_id>/",
|
||||
WorkspaceUserActivityEndpoint.as_view(),
|
||||
name="workspace-user-activity",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/user-profile/<uuid:user_id>/",
|
||||
WorkspaceUserProfileEndpoint.as_view(),
|
||||
name="workspace-user-profile-page",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/user-issues/<uuid:user_id>/",
|
||||
WorkspaceUserProfileIssuesEndpoint.as_view(),
|
||||
name="workspace-user-profile-issues",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/labels/",
|
||||
WorkspaceLabelsEndpoint.as_view(),
|
||||
name="workspace-labels",
|
||||
),
|
||||
## End Workspaces ##
|
||||
# Projects
|
||||
path(
|
||||
@@ -418,13 +479,17 @@ urlpatterns = [
|
||||
ProjectMemberViewSet.as_view(
|
||||
{
|
||||
"get": "retrieve",
|
||||
"put": "update",
|
||||
"patch": "partial_update",
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="project",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/project-members/",
|
||||
ProjectMemberEndpoint.as_view(),
|
||||
name="project",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/members/add/",
|
||||
AddMemberToProjectEndpoint.as_view(),
|
||||
@@ -469,7 +534,6 @@ urlpatterns = [
|
||||
"workspaces/<str:slug>/user-favorite-projects/",
|
||||
ProjectFavoritesViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
}
|
||||
),
|
||||
@@ -508,11 +572,6 @@ urlpatterns = [
|
||||
),
|
||||
name="project-state",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/states/<uuid:pk>/",
|
||||
StateDeleteIssueCheckEndpoint.as_view(),
|
||||
name="state-delete-check",
|
||||
),
|
||||
# End States ##
|
||||
# Estimates
|
||||
path(
|
||||
@@ -542,30 +601,6 @@ urlpatterns = [
|
||||
name="bulk-create-estimate-points",
|
||||
),
|
||||
# End Estimates ##
|
||||
# Shortcuts
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/shortcuts/",
|
||||
ShortCutViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
}
|
||||
),
|
||||
name="project-shortcut",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/shortcuts/<uuid:pk>/",
|
||||
ShortCutViewSet.as_view(
|
||||
{
|
||||
"get": "retrieve",
|
||||
"put": "update",
|
||||
"patch": "partial_update",
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="project-shortcut",
|
||||
),
|
||||
## End Shortcuts
|
||||
# Views
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/views/",
|
||||
@@ -664,21 +699,6 @@ urlpatterns = [
|
||||
CycleDateCheckEndpoint.as_view(),
|
||||
name="project-cycle",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/current-upcoming-cycles/",
|
||||
CurrentUpcomingCyclesEndpoint.as_view(),
|
||||
name="project-cycle-upcoming",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/completed-cycles/",
|
||||
CompletedCyclesEndpoint.as_view(),
|
||||
name="project-cycle-completed",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/draft-cycles/",
|
||||
DraftCyclesEndpoint.as_view(),
|
||||
name="project-cycle-draft",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-cycles/",
|
||||
CycleFavoriteViewSet.as_view(
|
||||
@@ -703,11 +723,6 @@ urlpatterns = [
|
||||
TransferCycleIssueEndpoint.as_view(),
|
||||
name="transfer-issues",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/incomplete-cycles/",
|
||||
InCompleteCyclesEndpoint.as_view(),
|
||||
name="transfer-issues",
|
||||
),
|
||||
## End Cycles
|
||||
# Issue
|
||||
path(
|
||||
@@ -816,6 +831,11 @@ urlpatterns = [
|
||||
IssueAttachmentEndpoint.as_view(),
|
||||
name="project-issue-attachments",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/export-issues/",
|
||||
ExportIssuesEndpoint.as_view(),
|
||||
name="export-issues",
|
||||
),
|
||||
## End Issues
|
||||
## Issue Activity
|
||||
path(
|
||||
@@ -848,30 +868,76 @@ urlpatterns = [
|
||||
name="project-issue-comment",
|
||||
),
|
||||
## End IssueComments
|
||||
## Roadmap
|
||||
# Issue Subscribers
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/roadmaps/",
|
||||
TimeLineIssueViewSet.as_view(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-subscribers/",
|
||||
IssueSubscriberViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
}
|
||||
),
|
||||
name="project-issue-roadmap",
|
||||
name="project-issue-subscribers",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/roadmaps/<uuid:pk>/",
|
||||
TimeLineIssueViewSet.as_view(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-subscribers/<uuid:subscriber_id>/",
|
||||
IssueSubscriberViewSet.as_view({"delete": "destroy"}),
|
||||
name="project-issue-subscribers",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/subscribe/",
|
||||
IssueSubscriberViewSet.as_view(
|
||||
{
|
||||
"get": "subscription_status",
|
||||
"post": "subscribe",
|
||||
"delete": "unsubscribe",
|
||||
}
|
||||
),
|
||||
name="project-issue-subscribers",
|
||||
),
|
||||
## End Issue Subscribers
|
||||
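The subscribe route above maps three HTTP verbs onto custom handler names. DRF's ViewSet.as_view() accepts any {method: handler} mapping, as this standalone sketch shows (handler bodies are illustrative):

# Standalone sketch of mapping HTTP verbs to custom ViewSet handlers,
# as the subscribe route above does; handler bodies are illustrative.
from rest_framework import status, viewsets
from rest_framework.response import Response


class SubscriptionViewSet(viewsets.ViewSet):
    def subscription_status(self, request, issue_id=None):
        # would check whether request.user follows the issue
        return Response({"subscribed": False})

    def subscribe(self, request, issue_id=None):
        return Response(status=status.HTTP_201_CREATED)

    def unsubscribe(self, request, issue_id=None):
        return Response(status=status.HTTP_204_NO_CONTENT)


view = SubscriptionViewSet.as_view(
    {"get": "subscription_status", "post": "subscribe", "delete": "unsubscribe"}
)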
# Issue Reactions
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
|
||||
IssueReactionViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
}
|
||||
),
|
||||
name="project-issue-reactions",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
|
||||
IssueReactionViewSet.as_view(
|
||||
{
|
||||
"get": "retrieve",
|
||||
"put": "update",
|
||||
"patch": "partial_update",
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="project-issue-roadmap",
|
||||
name="project-issue-reactions",
|
||||
),
|
||||
## End Roadmap
|
||||
## End Issue Reactions
|
||||
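Note that the reaction routes key deletions on <str:reaction_code> plus the requesting user rather than a pk, which suits a "remove my reaction" toggle. A sketch of what such a destroy handler could look like; the real implementation is not part of this diff and the lookup fields are assumptions:

# Hypothetical destroy() keyed on reaction_code instead of pk; lookup
# fields are assumptions, the real handler is not shown in this diff.
from rest_framework import status, viewsets
from rest_framework.response import Response
from plane.db.models import IssueReaction


class IssueReactionViewSet(viewsets.ViewSet):
    def destroy(self, request, slug, project_id, issue_id, reaction_code):
        IssueReaction.objects.filter(
            workspace__slug=slug,
            project_id=project_id,
            issue_id=issue_id,
            reaction=reaction_code,
            actor=request.user,
        ).delete()
        return Response(status=status.HTTP_204_NO_CONTENT)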
# Comment Reactions
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
|
||||
CommentReactionViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
}
|
||||
),
|
||||
name="project-issue-comment-reactions",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
|
||||
CommentReactionViewSet.as_view(
|
||||
{
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="project-issue-comment-reactions",
|
||||
),
|
||||
## End Comment Reactions
|
||||
## IssueProperty
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-properties/",
|
||||
@@ -896,6 +962,36 @@ urlpatterns = [
|
||||
name="project-issue-roadmap",
|
||||
),
|
||||
## End IssueProperty
|
||||
## Issue Archives
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/",
|
||||
IssueArchiveViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
}
|
||||
),
|
||||
name="project-issue-archive",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/<uuid:pk>/",
|
||||
IssueArchiveViewSet.as_view(
|
||||
{
|
||||
"get": "retrieve",
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="project-issue-archive",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/unarchive/<uuid:pk>/",
|
||||
IssueArchiveViewSet.as_view(
|
||||
{
|
||||
"post": "unarchive",
|
||||
}
|
||||
),
|
||||
name="project-issue-archive",
|
||||
),
|
||||
## End Issue Archives
|
||||
## File Assets
|
||||
path(
|
||||
"workspaces/<str:slug>/file-assets/",
|
||||
@@ -1077,26 +1173,6 @@ urlpatterns = [
|
||||
CreateIssueFromPageBlockEndpoint.as_view(),
|
||||
name="page-block-issues",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/recent-pages/",
|
||||
RecentPagesEndpoint.as_view(),
|
||||
name="recent-pages",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/favorite-pages/",
|
||||
FavoritePagesEndpoint.as_view(),
|
||||
name="recent-pages",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/my-pages/",
|
||||
MyPagesEndpoint.as_view(),
|
||||
name="user-pages",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/created-by-other-pages/",
|
||||
CreatedbyOtherPagesEndpoint.as_view(),
|
||||
name="created-by-other-pages",
|
||||
),
|
||||
## End Pages
|
||||
# API Tokens
|
||||
path("api-tokens/", ApiTokenEndpoint.as_view(), name="api-tokens"),
|
||||
@@ -1216,6 +1292,26 @@ urlpatterns = [
|
||||
),
|
||||
),
|
||||
## End Github Integrations
|
||||
# Slack Integration
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/",
|
||||
SlackProjectSyncViewSet.as_view(
|
||||
{
|
||||
"post": "create",
|
||||
"get": "list",
|
||||
}
|
||||
),
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/<uuid:pk>/",
|
||||
SlackProjectSyncViewSet.as_view(
|
||||
{
|
||||
"delete": "destroy",
|
||||
"get": "retrieve",
|
||||
}
|
||||
),
|
||||
),
|
||||
## End Slack Integration
|
||||
## End Integrations
|
||||
# Importer
|
||||
path(
|
||||
@@ -1246,7 +1342,7 @@ urlpatterns = [
|
||||
## End Importer
|
||||
# Search
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/search/",
|
||||
"workspaces/<str:slug>/search/",
|
||||
GlobalSearchEndpoint.as_view(),
|
||||
name="global-search",
|
||||
),
|
||||
@@ -1270,4 +1366,253 @@ urlpatterns = [
|
||||
name="release-notes",
|
||||
),
|
||||
## End Release Notes
|
||||
# Inbox
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/",
|
||||
InboxViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
}
|
||||
),
|
||||
name="inbox",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:pk>/",
|
||||
InboxViewSet.as_view(
|
||||
{
|
||||
"get": "retrieve",
|
||||
"patch": "partial_update",
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="inbox",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
|
||||
InboxIssueViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
}
|
||||
),
|
||||
name="inbox-issue",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
|
||||
InboxIssueViewSet.as_view(
|
||||
{
|
||||
"get": "retrieve",
|
||||
"patch": "partial_update",
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="inbox-issue",
|
||||
),
|
||||
## End Inbox
|
||||
# Analytics
|
||||
path(
|
||||
"workspaces/<str:slug>/analytics/",
|
||||
AnalyticsEndpoint.as_view(),
|
||||
name="plane-analytics",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/analytic-view/",
|
||||
AnalyticViewViewset.as_view({"get": "list", "post": "create"}),
|
||||
name="analytic-view",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/analytic-view/<uuid:pk>/",
|
||||
AnalyticViewViewset.as_view(
|
||||
{"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
|
||||
),
|
||||
name="analytic-view",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/saved-analytic-view/<uuid:analytic_id>/",
|
||||
SavedAnalyticEndpoint.as_view(),
|
||||
name="saved-analytic-view",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/export-analytics/",
|
||||
ExportAnalyticsEndpoint.as_view(),
|
||||
name="export-analytics",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/default-analytics/",
|
||||
DefaultAnalyticsEndpoint.as_view(),
|
||||
name="default-analytics",
|
||||
),
|
||||
## End Analytics
|
||||
# Notification
|
||||
path(
|
||||
"workspaces/<str:slug>/users/notifications/",
|
||||
NotificationViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
}
|
||||
),
|
||||
name="notifications",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/users/notifications/<uuid:pk>/",
|
||||
NotificationViewSet.as_view(
|
||||
{
|
||||
"get": "retrieve",
|
||||
"patch": "partial_update",
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="notifications",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/users/notifications/<uuid:pk>/read/",
|
||||
NotificationViewSet.as_view(
|
||||
{
|
||||
"post": "mark_read",
|
||||
"delete": "mark_unread",
|
||||
}
|
||||
),
|
||||
name="notifications",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/users/notifications/<uuid:pk>/archive/",
|
||||
NotificationViewSet.as_view(
|
||||
{
|
||||
"post": "archive",
|
||||
"delete": "unarchive",
|
||||
}
|
||||
),
|
||||
name="notifications",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/users/notifications/unread/",
|
||||
UnreadNotificationEndpoint.as_view(),
|
||||
name="unread-notifications",
|
||||
),
|
||||
## End Notification
|
||||
# Public Boards
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/",
|
||||
ProjectDeployBoardViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
}
|
||||
),
|
||||
name="project-deploy-board",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/<uuid:pk>/",
|
||||
ProjectDeployBoardViewSet.as_view(
|
||||
{
|
||||
"get": "retrieve",
|
||||
"patch": "partial_update",
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="project-deploy-board",
|
||||
),
|
||||
path(
|
||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/settings/",
|
||||
ProjectDeployBoardPublicSettingsEndpoint.as_view(),
|
||||
name="project-deploy-board-settings",
|
||||
),
|
||||
path(
|
||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/",
|
||||
ProjectDeployBoardIssuesPublicEndpoint.as_view(),
|
||||
name="project-deploy-board",
|
||||
),
|
||||
path(
|
||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
|
||||
IssueCommentPublicViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
}
|
||||
),
|
||||
name="issue-comments-project-board",
|
||||
),
|
||||
path(
|
||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
|
||||
IssueCommentPublicViewSet.as_view(
|
||||
{
|
||||
"get": "retrieve",
|
||||
"patch": "partial_update",
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="issue-comments-project-board",
|
||||
),
|
||||
path(
|
||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
|
||||
IssueReactionPublicViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
}
|
||||
),
|
||||
name="issue-reactions-project-board",
|
||||
),
|
||||
path(
|
||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
|
||||
IssueReactionPublicViewSet.as_view(
|
||||
{
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="issue-reactions-project-board",
|
||||
),
|
||||
path(
|
||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
|
||||
CommentReactionPublicViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
}
|
||||
),
|
||||
name="comment-reactions-project-board",
|
||||
),
|
||||
path(
|
||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
|
||||
CommentReactionPublicViewSet.as_view(
|
||||
{
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="comment-reactions-project-board",
|
||||
),
|
||||
path(
|
||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
|
||||
InboxIssuePublicViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
}
|
||||
),
|
||||
name="inbox-issue",
|
||||
),
|
||||
path(
|
||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
|
||||
InboxIssuePublicViewSet.as_view(
|
||||
{
|
||||
"get": "retrieve",
|
||||
"patch": "partial_update",
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="inbox-issue",
|
||||
),
|
||||
path(
|
||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/votes/",
|
||||
IssueVotePublicViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="issue-vote-project-board",
|
||||
),
|
||||
## End Public Boards
|
||||
]
|
||||
|
||||
@@ -12,10 +12,15 @@ from .project import (
|
||||
ProjectUserViewsEndpoint,
|
||||
ProjectMemberUserEndpoint,
|
||||
ProjectFavoritesViewSet,
|
||||
ProjectDeployBoardIssuesPublicEndpoint,
|
||||
ProjectDeployBoardViewSet,
|
||||
ProjectDeployBoardPublicSettingsEndpoint,
|
||||
ProjectMemberEndpoint,
|
||||
)
|
||||
from .people import (
|
||||
from .user import (
|
||||
UserEndpoint,
|
||||
UpdateUserOnBoardedEndpoint,
|
||||
UpdateUserTourCompletedEndpoint,
|
||||
UserActivityEndpoint,
|
||||
)
|
||||
|
||||
@@ -41,20 +46,21 @@ from .workspace import (
|
||||
UserIssueCompletedGraphEndpoint,
|
||||
UserWorkspaceDashboardEndpoint,
|
||||
WorkspaceThemeViewSet,
|
||||
WorkspaceUserProfileStatsEndpoint,
|
||||
WorkspaceUserActivityEndpoint,
|
||||
WorkspaceUserProfileEndpoint,
|
||||
WorkspaceUserProfileIssuesEndpoint,
|
||||
WorkspaceLabelsEndpoint,
|
||||
WorkspaceMembersEndpoint,
|
||||
)
|
||||
from .state import StateViewSet, StateDeleteIssueCheckEndpoint
|
||||
from .shortcut import ShortCutViewSet
|
||||
from .state import StateViewSet
|
||||
from .view import IssueViewViewSet, ViewIssuesEndpoint, IssueViewFavoriteViewSet
|
||||
from .cycle import (
|
||||
CycleViewSet,
|
||||
CycleIssueViewSet,
|
||||
CycleDateCheckEndpoint,
|
||||
CurrentUpcomingCyclesEndpoint,
|
||||
CompletedCyclesEndpoint,
|
||||
CycleFavoriteViewSet,
|
||||
DraftCyclesEndpoint,
|
||||
TransferCycleIssueEndpoint,
|
||||
InCompleteCyclesEndpoint,
|
||||
)
|
||||
from .asset import FileAssetEndpoint, UserAssetsEndpoint
|
||||
from .issue import (
|
||||
@@ -62,7 +68,6 @@ from .issue import (
|
||||
WorkSpaceIssuesEndpoint,
|
||||
IssueActivityEndpoint,
|
||||
IssueCommentViewSet,
|
||||
TimeLineIssueViewSet,
|
||||
IssuePropertyViewSet,
|
||||
LabelViewSet,
|
||||
BulkDeleteIssuesEndpoint,
|
||||
@@ -71,6 +76,14 @@ from .issue import (
|
||||
IssueLinkViewSet,
|
||||
BulkCreateIssueLabelsEndpoint,
|
||||
IssueAttachmentEndpoint,
|
||||
IssueArchiveViewSet,
|
||||
IssueSubscriberViewSet,
|
||||
IssueCommentPublicViewSet,
|
||||
CommentReactionViewSet,
|
||||
IssueReactionViewSet,
|
||||
IssueReactionPublicViewSet,
|
||||
CommentReactionPublicViewSet,
|
||||
IssueVotePublicViewSet,
|
||||
)
|
||||
|
||||
from .auth_extended import (
|
||||
@@ -83,6 +96,7 @@ from .auth_extended import (
|
||||
|
||||
|
||||
from .authentication import (
|
||||
SignUpEndpoint,
|
||||
SignInEndpoint,
|
||||
SignOutEndpoint,
|
||||
MagicSignInEndpoint,
|
||||
@@ -106,6 +120,7 @@ from .integration import (
|
||||
GithubCommentSyncViewSet,
|
||||
GithubRepositoriesEndpoint,
|
||||
BulkCreateGithubIssueSyncEndpoint,
|
||||
SlackProjectSyncViewSet,
|
||||
)
|
||||
|
||||
from .importer import (
|
||||
@@ -121,10 +136,6 @@ from .page import (
|
||||
PageBlockViewSet,
|
||||
PageFavoriteViewSet,
|
||||
CreateIssueFromPageBlockEndpoint,
|
||||
RecentPagesEndpoint,
|
||||
FavoritePagesEndpoint,
|
||||
MyPagesEndpoint,
|
||||
CreatedbyOtherPagesEndpoint,
|
||||
)
|
||||
|
||||
from .search import GlobalSearchEndpoint, IssueSearchEndpoint
|
||||
@@ -139,3 +150,19 @@ from .estimate import (
|
||||
|
||||
|
||||
from .release import ReleaseNotesEndpoint
|
||||
|
||||
from .inbox import InboxViewSet, InboxIssueViewSet, InboxIssuePublicViewSet
|
||||
|
||||
from .analytic import (
|
||||
AnalyticsEndpoint,
|
||||
AnalyticViewViewset,
|
||||
SavedAnalyticEndpoint,
|
||||
ExportAnalyticsEndpoint,
|
||||
DefaultAnalyticsEndpoint,
|
||||
)
|
||||
|
||||
from .notification import NotificationViewSet, UnreadNotificationEndpoint
|
||||
|
||||
from .exporter import (
|
||||
ExportIssuesEndpoint,
|
||||
)
|
||||
297
apiserver/plane/api/views/analytic.py
Normal file
@@ -0,0 +1,297 @@
|
||||
# Django imports
|
||||
from django.db.models import (
|
||||
Count,
|
||||
Sum,
|
||||
F,
|
||||
Q
|
||||
)
|
||||
from django.db.models.functions import ExtractMonth
|
||||
|
||||
# Third party imports
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
from sentry_sdk import capture_exception
|
||||
|
||||
# Module imports
|
||||
from plane.api.views import BaseAPIView, BaseViewSet
|
||||
from plane.api.permissions import WorkSpaceAdminPermission
|
||||
from plane.db.models import Issue, AnalyticView, Workspace, State, Label
|
||||
from plane.api.serializers import AnalyticViewSerializer
|
||||
from plane.utils.analytics_plot import build_graph_plot
|
||||
from plane.bgtasks.analytic_plot_export import analytic_export_task
|
||||
from plane.utils.issue_filters import issue_filters
|
||||
|
||||
|
||||
class AnalyticsEndpoint(BaseAPIView):
|
||||
permission_classes = [
|
||||
WorkSpaceAdminPermission,
|
||||
]
|
||||
|
||||
def get(self, request, slug):
|
||||
try:
|
||||
x_axis = request.GET.get("x_axis", False)
|
||||
y_axis = request.GET.get("y_axis", False)
|
||||
|
||||
if not x_axis or not y_axis:
|
||||
return Response(
|
||||
{"error": "x-axis and y-axis dimensions are required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
segment = request.GET.get("segment", False)
|
||||
filters = issue_filters(request.GET, "GET")
|
||||
|
||||
queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters)
|
||||
|
||||
total_issues = queryset.count()
|
||||
distribution = build_graph_plot(
|
||||
queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
|
||||
)
|
||||
|
||||
colors = dict()
|
||||
if x_axis in ["state__name", "state__group"] or segment in [
|
||||
"state__name",
|
||||
"state__group",
|
||||
]:
|
||||
if x_axis in ["state__name", "state__group"]:
|
||||
key = "name" if x_axis == "state__name" else "group"
|
||||
else:
|
||||
key = "name" if segment == "state__name" else "group"
|
||||
|
||||
colors = (
|
||||
State.objects.filter(
|
||||
~Q(name="Triage"),
|
||||
workspace__slug=slug, project_id__in=filters.get("project__in")
|
||||
).values(key, "color")
|
||||
if filters.get("project__in", False)
|
||||
else State.objects.filter(~Q(name="Triage"), workspace__slug=slug).values(key, "color")
|
||||
)
|
||||
|
||||
if x_axis in ["labels__name"] or segment in ["labels__name"]:
|
||||
colors = (
|
||||
Label.objects.filter(
|
||||
workspace__slug=slug, project_id__in=filters.get("project__in")
|
||||
).values("name", "color")
|
||||
if filters.get("project__in", False)
|
||||
else Label.objects.filter(workspace__slug=slug).values(
|
||||
"name", "color"
|
||||
)
|
||||
)
|
||||
|
||||
assignee_details = {}
|
||||
if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
|
||||
assignee_details = (
|
||||
Issue.issue_objects.filter(workspace__slug=slug, **filters, assignees__avatar__isnull=False)
|
||||
.order_by("assignees__id")
|
||||
.distinct("assignees__id")
|
||||
.values("assignees__avatar", "assignees__display_name", "assignees__first_name", "assignees__last_name", "assignees__id")
|
||||
)
|
||||
|
||||
|
||||
return Response(
|
||||
{
|
||||
"total": total_issues,
|
||||
"distribution": distribution,
|
||||
"extras": {"colors": colors, "assignee_details": assignee_details},
|
||||
},
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
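The colors lookup in the endpoint above excludes the internal "Triage" state with a negated Q object and only narrows by project when a project filter is present. A condensed sketch of that logic:

# Condensed version of the state-color lookup above; a negated Q drops the
# internal "Triage" state, and the project filter is applied only if given.
from django.db.models import Q
from plane.db.models import State


def state_colors(slug, key, project_ids=None):
    queryset = State.objects.filter(~Q(name="Triage"), workspace__slug=slug)
    if project_ids:
        queryset = queryset.filter(project_id__in=project_ids)
    return queryset.values(key, "color")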
|
||||
|
||||
class AnalyticViewViewset(BaseViewSet):
|
||||
permission_classes = [
|
||||
WorkSpaceAdminPermission,
|
||||
]
|
||||
model = AnalyticView
|
||||
serializer_class = AnalyticViewSerializer
|
||||
|
||||
def perform_create(self, serializer):
|
||||
workspace = Workspace.objects.get(slug=self.kwargs.get("slug"))
|
||||
serializer.save(workspace_id=workspace.id)
|
||||
|
||||
def get_queryset(self):
|
||||
return self.filter_queryset(
|
||||
super().get_queryset().filter(workspace__slug=self.kwargs.get("slug"))
|
||||
)
|
||||
|
||||
|
||||
class SavedAnalyticEndpoint(BaseAPIView):
|
||||
permission_classes = [
|
||||
WorkSpaceAdminPermission,
|
||||
]
|
||||
|
||||
def get(self, request, slug, analytic_id):
|
||||
try:
|
||||
analytic_view = AnalyticView.objects.get(
|
||||
pk=analytic_id, workspace__slug=slug
|
||||
)
|
||||
|
||||
filter = analytic_view.query
|
||||
queryset = Issue.issue_objects.filter(**filter)
|
||||
|
||||
x_axis = analytic_view.query_dict.get("x_axis", False)
|
||||
y_axis = analytic_view.query_dict.get("y_axis", False)
|
||||
|
||||
if not x_axis or not y_axis:
|
||||
return Response(
|
||||
{"error": "x-axis and y-axis dimensions are required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
segment = request.GET.get("segment", False)
|
||||
distribution = build_graph_plot(
|
||||
queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
|
||||
)
|
||||
total_issues = queryset.count()
|
||||
return Response(
|
||||
{"total": total_issues, "distribution": distribution},
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
except AnalyticView.DoesNotExist:
|
||||
return Response(
|
||||
{"error": "Analytic View Does not exist"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
|
||||
class ExportAnalyticsEndpoint(BaseAPIView):
|
||||
permission_classes = [
|
||||
WorkSpaceAdminPermission,
|
||||
]
|
||||
|
||||
def post(self, request, slug):
|
||||
try:
|
||||
x_axis = request.data.get("x_axis", False)
|
||||
y_axis = request.data.get("y_axis", False)
|
||||
|
||||
if not x_axis or not y_axis:
|
||||
return Response(
|
||||
{"error": "x-axis and y-axis dimensions are required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
analytic_export_task.delay(
|
||||
email=request.user.email, data=request.data, slug=slug
|
||||
)
|
||||
|
||||
return Response(
|
||||
{
|
||||
"message": f"Once the export is ready it will be emailed to you at {str(request.user.email)}"
|
||||
},
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
|
||||
class DefaultAnalyticsEndpoint(BaseAPIView):
|
||||
permission_classes = [
|
||||
WorkSpaceAdminPermission,
|
||||
]
|
||||
|
||||
def get(self, request, slug):
|
||||
try:
|
||||
filters = issue_filters(request.GET, "GET")
|
||||
|
||||
queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters)
|
||||
|
||||
total_issues = queryset.count()
|
||||
|
||||
total_issues_classified = (
|
||||
queryset.annotate(state_group=F("state__group"))
|
||||
.values("state_group")
|
||||
.annotate(state_count=Count("state_group"))
|
||||
.order_by("state_group")
|
||||
)
|
||||
|
||||
open_issues = queryset.filter(
|
||||
state__group__in=["backlog", "unstarted", "started"]
|
||||
).count()
|
||||
|
||||
open_issues_classified = (
|
||||
queryset.filter(state__group__in=["backlog", "unstarted", "started"])
|
||||
.annotate(state_group=F("state__group"))
|
||||
.values("state_group")
|
||||
.annotate(state_count=Count("state_group"))
|
||||
.order_by("state_group")
|
||||
)
|
||||
|
||||
issue_completed_month_wise = (
|
||||
queryset.filter(completed_at__isnull=False)
|
||||
.annotate(month=ExtractMonth("completed_at"))
|
||||
.values("month")
|
||||
.annotate(count=Count("*"))
|
||||
.order_by("month")
|
||||
)
|
||||
most_issue_created_user = (
|
||||
queryset.exclude(created_by=None)
|
||||
.values("created_by__first_name", "created_by__last_name", "created_by__avatar", "created_by__display_name", "created_by__id")
|
||||
.annotate(count=Count("id"))
|
||||
.order_by("-count")
|
||||
)[:5]
|
||||
|
||||
most_issue_closed_user = (
|
||||
queryset.filter(completed_at__isnull=False, assignees__isnull=False)
|
||||
.values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__display_name", "assignees__id")
|
||||
.annotate(count=Count("id"))
|
||||
.order_by("-count")
|
||||
)[:5]
|
||||
|
||||
pending_issue_user = (
|
||||
queryset.filter(completed_at__isnull=True)
|
||||
.values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__display_name", "assignees__id")
|
||||
.annotate(count=Count("id"))
|
||||
.order_by("-count")
|
||||
)
|
||||
|
||||
open_estimate_sum = (
|
||||
queryset.filter(
|
||||
state__group__in=["backlog", "unstarted", "started"]
|
||||
).aggregate(open_estimate_sum=Sum("estimate_point"))
|
||||
)["open_estimate_sum"]
|
||||
|
||||
total_estimate_sum = queryset.aggregate(
|
||||
total_estimate_sum=Sum("estimate_point")
|
||||
)["total_estimate_sum"]
|
||||
|
||||
return Response(
|
||||
{
|
||||
"total_issues": total_issues,
|
||||
"total_issues_classified": total_issues_classified,
|
||||
"open_issues": open_issues,
|
||||
"open_issues_classified": open_issues_classified,
|
||||
"issue_completed_month_wise": issue_completed_month_wise,
|
||||
"most_issue_created_user": most_issue_created_user,
|
||||
"most_issue_closed_user": most_issue_closed_user,
|
||||
"pending_issue_user": pending_issue_user,
|
||||
"open_estimate_sum": open_estimate_sum,
|
||||
"total_estimate_sum": total_estimate_sum,
|
||||
},
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
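DefaultAnalyticsEndpoint's month-wise rollup is a standard annotate/values/annotate group-by; isolated, it looks like this:

# Isolated version of the month-wise completion rollup used above.
from django.db.models import Count
from django.db.models.functions import ExtractMonth


def completed_by_month(queryset):
    return (
        queryset.filter(completed_at__isnull=False)
        .annotate(month=ExtractMonth("completed_at"))
        .values("month")
        .annotate(count=Count("*"))
        .order_by("month")
    )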
@@ -3,10 +3,10 @@ from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.parsers import MultiPartParser, FormParser
|
||||
from sentry_sdk import capture_exception
|
||||
|
||||
from django.conf import settings
|
||||
# Module imports
|
||||
from .base import BaseAPIView
|
||||
from plane.db.models import FileAsset
|
||||
from plane.db.models import FileAsset, Workspace
|
||||
from plane.api.serializers import FileAssetSerializer
|
||||
|
||||
|
||||
@@ -27,15 +27,13 @@ class FileAssetEndpoint(BaseAPIView):
|
||||
try:
|
||||
serializer = FileAssetSerializer(data=request.data)
|
||||
if serializer.is_valid():
|
||||
if request.user.last_workspace_id is None:
|
||||
return Response(
|
||||
{"error": "Workspace id is required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
serializer.save(workspace_id=request.user.last_workspace_id)
|
||||
# Get the workspace
|
||||
workspace = Workspace.objects.get(slug=slug)
|
||||
serializer.save(workspace_id=workspace.id)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
except Workspace.DoesNotExist:
|
||||
return Response({"error": "Workspace does not exist"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
|
||||
@@ -22,7 +22,7 @@ from sentry_sdk import capture_exception
|
||||
|
||||
## Module imports
|
||||
from . import BaseAPIView
|
||||
from plane.api.serializers.people import (
|
||||
from plane.api.serializers import (
|
||||
ChangePasswordSerializer,
|
||||
ResetPasswordSerializer,
|
||||
)
|
||||
|
||||
@@ -36,6 +36,99 @@ def get_tokens_for_user(user):
|
||||
)
|
||||
|
||||
|
||||
class SignUpEndpoint(BaseAPIView):
    permission_classes = (AllowAny,)

    def post(self, request):
        try:
            if not settings.ENABLE_SIGNUP:
                return Response(
                    {
                        "error": "New account creation is disabled. Please contact your site administrator"
                    },
                    status=status.HTTP_400_BAD_REQUEST,
                )

            email = request.data.get("email", False)
            password = request.data.get("password", False)

            ## Raise exception if any of the above are missing
            if not email or not password:
                return Response(
                    {"error": "Both email and password are required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            email = email.strip().lower()

            try:
                validate_email(email)
            except ValidationError as e:
                return Response(
                    {"error": "Please provide a valid email address."},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            # Check if the user already exists
            if User.objects.filter(email=email).exists():
                return Response(
                    {"error": "User with this email already exists"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            user = User.objects.create(email=email, username=uuid.uuid4().hex)
            user.set_password(password)

            # Setting last active for the user
            user.last_active = timezone.now()
            user.last_login_time = timezone.now()
            user.last_login_ip = request.META.get("REMOTE_ADDR")
            user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
            user.token_updated_at = timezone.now()
            user.save()

            serialized_user = UserSerializer(user).data

            access_token, refresh_token = get_tokens_for_user(user)

            data = {
                "access_token": access_token,
                "refresh_token": refresh_token,
                "user": serialized_user,
            }

            # Send Analytics
            if settings.ANALYTICS_BASE_API:
                _ = requests.post(
                    settings.ANALYTICS_BASE_API,
                    headers={
                        "Content-Type": "application/json",
                        "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
                    },
                    json={
                        "event_id": uuid.uuid4().hex,
                        "event_data": {
                            "medium": "email",
                        },
                        "user": {"email": email, "id": str(user.id)},
                        "device_ctx": {
                            "ip": request.META.get("REMOTE_ADDR"),
                            "user_agent": request.META.get("HTTP_USER_AGENT"),
                        },
                        "event_type": "SIGN_UP",
                    },
                )

            return Response(data, status=status.HTTP_200_OK)

        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

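As a usage illustration, a sign-up request against this endpoint might look like the following; the route path is an assumption for the sketch, not taken from this diff:

# Sketch only: the endpoint path below is hypothetical.
import requests

resp = requests.post(
    "https://app.example.com/api/sign-up/",  # hypothetical mount point
    json={"email": "dev@example.com", "password": "a-strong-password"},
)
# On success the endpoint returns access_token, refresh_token and the user payload.
print(resp.json().get("access_token"))
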
class SignInEndpoint(BaseAPIView):
    permission_classes = (AllowAny,)

@@ -63,108 +156,69 @@ class SignInEndpoint(BaseAPIView):

            user = User.objects.filter(email=email).first()

            # Sign up Process
            if user is None:
                user = User.objects.create(email=email, username=uuid.uuid4().hex)
                user.set_password(password)
                return Response(
                    {
                        "error": "Sorry, we could not find a user with the provided credentials. Please try again."
                    },
                    status=status.HTTP_403_FORBIDDEN,
                )

            # Setting last active for the user
            user.last_active = timezone.now()
            user.last_login_time = timezone.now()
            user.last_login_ip = request.META.get("REMOTE_ADDR")
            user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
            user.token_updated_at = timezone.now()
            user.save()
            # Sign in Process
            if not user.check_password(password):
                return Response(
                    {
                        "error": "Sorry, we could not find a user with the provided credentials. Please try again."
                    },
                    status=status.HTTP_403_FORBIDDEN,
                )
            if not user.is_active:
                return Response(
                    {
                        "error": "Your account has been deactivated. Please contact your site administrator."
                    },
                    status=status.HTTP_403_FORBIDDEN,
                )

            serialized_user = UserSerializer(user).data
            serialized_user = UserSerializer(user).data

            access_token, refresh_token = get_tokens_for_user(user)
            # Setting last active for the user
            user.last_active = timezone.now()
            user.last_login_time = timezone.now()
            user.last_login_ip = request.META.get("REMOTE_ADDR")
            user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
            user.token_updated_at = timezone.now()
            user.save()

            data = {
                "access_token": access_token,
                "refresh_token": refresh_token,
                "user": serialized_user,
            }

            # Send Analytics
            if settings.ANALYTICS_BASE_API:
                _ = requests.post(
                    settings.ANALYTICS_BASE_API,
                    headers={
                        "Content-Type": "application/json",
                        "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
            access_token, refresh_token = get_tokens_for_user(user)
            # Send Analytics
            if settings.ANALYTICS_BASE_API:
                _ = requests.post(
                    settings.ANALYTICS_BASE_API,
                    headers={
                        "Content-Type": "application/json",
                        "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
                    },
                    json={
                        "event_id": uuid.uuid4().hex,
                        "event_data": {
                            "medium": "email",
                        },
                    json={
                        "event_id": uuid.uuid4().hex,
                        "event_data": {
                            "medium": "email",
                        },
                        "user": {"email": email, "id": str(user.id)},
                        "device_ctx": {
                            "ip": request.META.get("REMOTE_ADDR"),
                            "user_agent": request.META.get("HTTP_USER_AGENT"),
                        },
                        "event_type": "SIGN_UP",
                        "user": {"email": email, "id": str(user.id)},
                        "device_ctx": {
                            "ip": request.META.get("REMOTE_ADDR"),
                            "user_agent": request.META.get("HTTP_USER_AGENT"),
                        },
                )
                        "event_type": "SIGN_IN",
                    },
                )
            data = {
                "access_token": access_token,
                "refresh_token": refresh_token,
                "user": serialized_user,
            }

            return Response(data, status=status.HTTP_200_OK)
            # Sign in Process
            else:
                if not user.check_password(password):
                    return Response(
                        {
                            "error": "Sorry, we could not find a user with the provided credentials. Please try again."
                        },
                        status=status.HTTP_403_FORBIDDEN,
                    )
                if not user.is_active:
                    return Response(
                        {
                            "error": "Your account has been deactivated. Please contact your site administrator."
                        },
                        status=status.HTTP_403_FORBIDDEN,
                    )

                serialized_user = UserSerializer(user).data

                # Setting last active for the user
                user.last_active = timezone.now()
                user.last_login_time = timezone.now()
                user.last_login_ip = request.META.get("REMOTE_ADDR")
                user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
                user.token_updated_at = timezone.now()
                user.save()

                access_token, refresh_token = get_tokens_for_user(user)
                # Send Analytics
                if settings.ANALYTICS_BASE_API:
                    _ = requests.post(
                        settings.ANALYTICS_BASE_API,
                        headers={
                            "Content-Type": "application/json",
                            "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
                        },
                        json={
                            "event_id": uuid.uuid4().hex,
                            "event_data": {
                                "medium": "email",
                            },
                            "user": {"email": email, "id": str(user.id)},
                            "device_ctx": {
                                "ip": request.META.get("REMOTE_ADDR"),
                                "user_agent": request.META.get("HTTP_USER_AGENT"),
                            },
                            "event_type": "SIGN_IN",
                        },
                    )
                data = {
                    "access_token": access_token,
                    "refresh_token": refresh_token,
                    "user": serialized_user,
                }

                return Response(data, status=status.HTTP_200_OK)
            return Response(data, status=status.HTTP_200_OK)

        except Exception as e:
            capture_exception(e)
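Both sign-up and sign-in call `get_tokens_for_user`; for reference, a minimal sketch of such a helper, assuming the project issues JWTs via `rest_framework_simplejwt` (an assumption, not confirmed by this hunk):

# Sketch only: assumes rest_framework_simplejwt is the token backend.
from rest_framework_simplejwt.tokens import RefreshToken

def get_tokens_for_user(user):
    refresh = RefreshToken.for_user(user)
    # Returns (access, refresh) as strings, matching the unpacking used above.
    return str(refresh.access_token), str(refresh)
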
@@ -225,6 +279,8 @@ class MagicSignInGenerateEndpoint(BaseAPIView):
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Clean up
        email = email.strip().lower()
        validate_email(email)

        ## Generate a random token
@@ -291,8 +347,8 @@ class MagicSignInEndpoint(BaseAPIView):

    def post(self, request):
        try:
            user_token = request.data.get("token", "").strip().lower()
            key = request.data.get("key", False)
            user_token = request.data.get("token", "").strip()
            key = request.data.get("key", False).strip().lower()

            if not key or user_token == "":
                return Response(
@@ -3,7 +3,17 @@ import json

# Django imports
from django.db import IntegrityError
from django.db.models import OuterRef, Func, F, Q, Exists, OuterRef, Count, Prefetch
from django.db.models import (
    OuterRef,
    Func,
    F,
    Q,
    Exists,
    OuterRef,
    Count,
    Prefetch,
    Sum,
)
from django.core import serializers
from django.utils import timezone
from django.utils.decorators import method_decorator
@@ -21,19 +31,23 @@ from plane.api.serializers import (
    CycleIssueSerializer,
    CycleFavoriteSerializer,
    IssueStateSerializer,
    CycleWriteSerializer,
)
from plane.api.permissions import ProjectEntityPermission
from plane.db.models import (
    User,
    Cycle,
    CycleIssue,
    Issue,
    CycleFavorite,
    IssueLink,
    IssueAttachment,
    Label,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.utils.grouper import group_results
from plane.utils.issue_filters import issue_filters
from plane.utils.analytics_plot import burndown_plot


class CycleViewSet(BaseViewSet):
@@ -48,6 +62,28 @@ class CycleViewSet(BaseViewSet):
            project_id=self.kwargs.get("project_id"), owned_by=self.request.user
        )

    def perform_destroy(self, instance):
        cycle_issues = list(
            CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list(
                "issue", flat=True
            )
        )
        issue_activity.delay(
            type="cycle.activity.deleted",
            requested_data=json.dumps(
                {
                    "cycle_id": str(self.kwargs.get("pk")),
                    "issues": [str(issue_id) for issue_id in cycle_issues],
                }
            ),
            actor_id=str(self.request.user.id),
            issue_id=str(self.kwargs.get("pk", None)),
            project_id=str(self.kwargs.get("project_id", None)),
            current_instance=None,
        )

        return super().perform_destroy(instance)

    def get_queryset(self):
        subquery = CycleFavorite.objects.filter(
            user=self.request.user,
@@ -96,10 +132,171 @@ class CycleViewSet(BaseViewSet):
                    filter=Q(issue_cycle__issue__state__group="backlog"),
                )
            )
            .annotate(total_estimates=Sum("issue_cycle__issue__estimate_point"))
            .annotate(
                completed_estimates=Sum(
                    "issue_cycle__issue__estimate_point",
                    filter=Q(issue_cycle__issue__state__group="completed"),
                )
            )
            .annotate(
                started_estimates=Sum(
                    "issue_cycle__issue__estimate_point",
                    filter=Q(issue_cycle__issue__state__group="started"),
                )
            )
            .prefetch_related(
                Prefetch(
                    "issue_cycle__issue__assignees",
                    queryset=User.objects.only("avatar", "first_name", "id").distinct(),
                )
            )
            .prefetch_related(
                Prefetch(
                    "issue_cycle__issue__labels",
                    queryset=Label.objects.only("name", "color", "id").distinct(),
                )
            )
            .order_by("-is_favorite", "name")
            .distinct()
        )

    def list(self, request, slug, project_id):
        try:
            queryset = self.get_queryset()
            cycle_view = request.GET.get("cycle_view", "all")
            order_by = request.GET.get("order_by", "sort_order")

            queryset = queryset.order_by(order_by)

            # All Cycles
            if cycle_view == "all":
                return Response(
                    CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
                )

            # Current Cycle
            if cycle_view == "current":
                queryset = queryset.filter(
                    start_date__lte=timezone.now(),
                    end_date__gte=timezone.now(),
                )

                data = CycleSerializer(queryset, many=True).data

                if len(data):
                    assignee_distribution = (
                        Issue.objects.filter(
                            issue_cycle__cycle_id=data[0]["id"],
                            workspace__slug=slug,
                            project_id=project_id,
                        )
                        .annotate(first_name=F("assignees__first_name"))
                        .annotate(last_name=F("assignees__last_name"))
                        .annotate(assignee_id=F("assignees__id"))
                        .annotate(avatar=F("assignees__avatar"))
                        .values("first_name", "last_name", "assignee_id", "avatar")
                        .annotate(total_issues=Count("assignee_id"))
                        .annotate(
                            completed_issues=Count(
                                "assignee_id",
                                filter=Q(completed_at__isnull=False),
                            )
                        )
                        .annotate(
                            pending_issues=Count(
                                "assignee_id",
                                filter=Q(completed_at__isnull=True),
                            )
                        )
                        .order_by("first_name", "last_name")
                    )

                    label_distribution = (
                        Issue.objects.filter(
                            issue_cycle__cycle_id=data[0]["id"],
                            workspace__slug=slug,
                            project_id=project_id,
                        )
                        .annotate(label_name=F("labels__name"))
                        .annotate(color=F("labels__color"))
                        .annotate(label_id=F("labels__id"))
                        .values("label_name", "color", "label_id")
                        .annotate(total_issues=Count("label_id"))
                        .annotate(
                            completed_issues=Count(
                                "label_id",
                                filter=Q(completed_at__isnull=False),
                            )
                        )
                        .annotate(
                            pending_issues=Count(
                                "label_id",
                                filter=Q(completed_at__isnull=True),
                            )
                        )
                        .order_by("label_name")
                    )
                    data[0]["distribution"] = {
                        "assignees": assignee_distribution,
                        "labels": label_distribution,
                        "completion_chart": {},
                    }
                    if data[0]["start_date"] and data[0]["end_date"]:
                        data[0]["distribution"]["completion_chart"] = burndown_plot(
                            queryset=queryset.first(),
                            slug=slug,
                            project_id=project_id,
                            cycle_id=data[0]["id"],
                        )

                return Response(data, status=status.HTTP_200_OK)

            # Upcoming Cycles
            if cycle_view == "upcoming":
                queryset = queryset.filter(start_date__gt=timezone.now())
                return Response(
                    CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
                )

            # Completed Cycles
            if cycle_view == "completed":
                queryset = queryset.filter(end_date__lt=timezone.now())
                return Response(
                    CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
                )

            # Draft Cycles
            if cycle_view == "draft":
                queryset = queryset.filter(
                    end_date=None,
                    start_date=None,
                )

                return Response(
                    CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
                )

            # Incomplete Cycles
            if cycle_view == "incomplete":
                queryset = queryset.filter(
                    Q(end_date__gte=timezone.now().date()) | Q(end_date__isnull=True),
                )
                return Response(
                    CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
                )

            return Response(
                {"error": "No matching view found"}, status=status.HTTP_400_BAD_REQUEST
            )

        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

    def create(self, request, slug, project_id):
        try:
            if (
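The distribution queries above lean on Django's conditional aggregation; here is a minimal standalone sketch of the Count(..., filter=Q(...)) pattern, with model and field names reused from the hunk purely for illustration (cycle_id is a placeholder):

# Sketch only: per-assignee totals split into completed and pending,
# computed in a single query via filtered aggregates.
from django.db.models import Count, F, Q

per_assignee = (
    Issue.objects.filter(issue_cycle__cycle_id=cycle_id)
    .annotate(assignee_id=F("assignees__id"))
    .values("assignee_id")
    .annotate(total_issues=Count("assignee_id"))
    .annotate(completed_issues=Count("assignee_id", filter=Q(completed_at__isnull=False)))
    .annotate(pending_issues=Count("assignee_id", filter=Q(completed_at__isnull=True)))
)
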
@@ -145,7 +342,7 @@ class CycleViewSet(BaseViewSet):
                    status=status.HTTP_400_BAD_REQUEST,
                )

            serializer = CycleSerializer(cycle, data=request.data, partial=True)
            serializer = CycleWriteSerializer(cycle, data=request.data, partial=True)
            if serializer.is_valid():
                serializer.save()
                return Response(serializer.data, status=status.HTTP_200_OK)
@@ -161,6 +358,93 @@ class CycleViewSet(BaseViewSet):
                status=status.HTTP_400_BAD_REQUEST,
            )

    def retrieve(self, request, slug, project_id, pk):
        try:
            queryset = self.get_queryset().get(pk=pk)

            # Assignee Distribution
            assignee_distribution = (
                Issue.objects.filter(
                    issue_cycle__cycle_id=pk,
                    workspace__slug=slug,
                    project_id=project_id,
                )
                .annotate(first_name=F("assignees__first_name"))
                .annotate(last_name=F("assignees__last_name"))
                .annotate(assignee_id=F("assignees__id"))
                .annotate(avatar=F("assignees__avatar"))
                .annotate(display_name=F("assignees__display_name"))
                .values("first_name", "last_name", "assignee_id", "avatar", "display_name")
                .annotate(total_issues=Count("assignee_id"))
                .annotate(
                    completed_issues=Count(
                        "assignee_id",
                        filter=Q(completed_at__isnull=False),
                    )
                )
                .annotate(
                    pending_issues=Count(
                        "assignee_id",
                        filter=Q(completed_at__isnull=True),
                    )
                )
                .order_by("first_name", "last_name")
            )

            # Label Distribution
            label_distribution = (
                Issue.objects.filter(
                    issue_cycle__cycle_id=pk,
                    workspace__slug=slug,
                    project_id=project_id,
                )
                .annotate(label_name=F("labels__name"))
                .annotate(color=F("labels__color"))
                .annotate(label_id=F("labels__id"))
                .values("label_name", "color", "label_id")
                .annotate(total_issues=Count("label_id"))
                .annotate(
                    completed_issues=Count(
                        "label_id",
                        filter=Q(completed_at__isnull=False),
                    )
                )
                .annotate(
                    pending_issues=Count(
                        "label_id",
                        filter=Q(completed_at__isnull=True),
                    )
                )
                .order_by("label_name")
            )

            data = CycleSerializer(queryset).data
            data["distribution"] = {
                "assignees": assignee_distribution,
                "labels": label_distribution,
                "completion_chart": {},
            }

            if queryset.start_date and queryset.end_date:
                data["distribution"]["completion_chart"] = burndown_plot(
                    queryset=queryset, slug=slug, project_id=project_id, cycle_id=pk
                )

            return Response(
                data,
                status=status.HTTP_200_OK,
            )
        except Cycle.DoesNotExist:
            return Response(
                {"error": "Cycle does not exist"}, status=status.HTTP_400_BAD_REQUEST
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )


class CycleIssueViewSet(BaseViewSet):
    serializer_class = CycleIssueSerializer
@@ -181,12 +465,28 @@ class CycleIssueViewSet(BaseViewSet):
            cycle_id=self.kwargs.get("cycle_id"),
        )

    def perform_destroy(self, instance):
        issue_activity.delay(
            type="cycle.activity.deleted",
            requested_data=json.dumps(
                {
                    "cycle_id": str(self.kwargs.get("cycle_id")),
                    "issues": [str(instance.issue_id)],
                }
            ),
            actor_id=str(self.request.user.id),
            issue_id=str(self.kwargs.get("pk", None)),
            project_id=str(self.kwargs.get("project_id", None)),
            current_instance=None,
        )
        return super().perform_destroy(instance)

    def get_queryset(self):
        return self.filter_queryset(
            super()
            .get_queryset()
            .annotate(
                sub_issues_count=Issue.objects.filter(parent=OuterRef("issue_id"))
                sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue_id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
@@ -210,9 +510,9 @@ class CycleIssueViewSet(BaseViewSet):
            group_by = request.GET.get("group_by", False)
            filters = issue_filters(request.query_params, "GET")
            issues = (
                Issue.objects.filter(issue_cycle__cycle_id=cycle_id)
                Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
                .annotate(
                    sub_issues_count=Issue.objects.filter(parent=OuterRef("id"))
                    sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
                    .order_by()
                    .annotate(count=Func(F("id"), function="Count"))
                    .values("count")
@@ -286,9 +586,9 @@ class CycleIssueViewSet(BaseViewSet):

            # Get all CycleIssues already created
            cycle_issues = list(CycleIssue.objects.filter(issue_id__in=issues))
            records_to_update = []
            update_cycle_issue_activity = []
            record_to_create = []
            records_to_update = []

            for issue in issues:
                cycle_issue = [
@@ -333,7 +633,7 @@ class CycleIssueViewSet(BaseViewSet):

            # Capture Issue Activity
            issue_activity.delay(
                type="issue.activity.updated",
                type="cycle.activity.created",
                requested_data=json.dumps({"cycles_list": issues}),
                actor_id=str(self.request.user.id),
                issue_id=str(self.kwargs.get("pk", None)),
@@ -375,7 +675,7 @@ class CycleDateCheckEndpoint(BaseAPIView):
        try:
            start_date = request.data.get("start_date", False)
            end_date = request.data.get("end_date", False)

            cycle_id = request.data.get("cycle_id")
            if not start_date or not end_date:
                return Response(
                    {"error": "Start date and end date both are required"},
@@ -383,18 +683,19 @@ class CycleDateCheckEndpoint(BaseAPIView):
                )

            cycles = Cycle.objects.filter(
                Q(start_date__lte=start_date, end_date__gte=start_date)
                | Q(start_date__lte=end_date, end_date__gte=end_date)
                | Q(start_date__gte=start_date, end_date__lte=end_date),
                workspace__slug=slug,
                project_id=project_id,
            )
                Q(workspace__slug=slug)
                & Q(project_id=project_id)
                & (
                    Q(start_date__lte=start_date, end_date__gte=start_date)
                    | Q(start_date__lte=end_date, end_date__gte=end_date)
                    | Q(start_date__gte=start_date, end_date__lte=end_date)
                )
            ).exclude(pk=cycle_id)

            if cycles.exists():
                return Response(
                    {
                        "error": "You have a cycle already on the given dates, if you want to create your draft cycle you can do that by removing dates",
                        "cycles": CycleSerializer(cycles, many=True).data,
                        "status": False,
                    }
                )
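The three Q clauses above encode the standard interval-overlap test (the new range starts inside, ends inside, or fully contains an existing cycle); a small sketch of the same logic in plain Python, for intuition:

# Sketch only: date-range overlap check equivalent to the ORM Q clauses above.
def overlaps(existing_start, existing_end, new_start, new_end):
    return (
        (existing_start <= new_start <= existing_end)      # starts inside existing
        or (existing_start <= new_end <= existing_end)     # ends inside existing
        or (new_start <= existing_start and existing_end <= new_end)  # contains existing
    )
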
@@ -408,272 +709,7 @@ class CycleDateCheckEndpoint(BaseAPIView):
            )


class CurrentUpcomingCyclesEndpoint(BaseAPIView):
    permission_classes = [
        ProjectEntityPermission,
    ]

    def get(self, request, slug, project_id):
        try:
            subquery = CycleFavorite.objects.filter(
                user=self.request.user,
                cycle_id=OuterRef("pk"),
                project_id=project_id,
                workspace__slug=slug,
            )
            current_cycle = (
                Cycle.objects.filter(
                    workspace__slug=slug,
                    project_id=project_id,
                    start_date__lte=timezone.now(),
                    end_date__gte=timezone.now(),
                )
                .select_related("project")
                .select_related("workspace")
                .select_related("owned_by")
                .annotate(is_favorite=Exists(subquery))
                .annotate(total_issues=Count("issue_cycle"))
                .annotate(
                    completed_issues=Count(
                        "issue_cycle__issue__state__group",
                        filter=Q(issue_cycle__issue__state__group="completed"),
                    )
                )
                .annotate(
                    cancelled_issues=Count(
                        "issue_cycle__issue__state__group",
                        filter=Q(issue_cycle__issue__state__group="cancelled"),
                    )
                )
                .annotate(
                    started_issues=Count(
                        "issue_cycle__issue__state__group",
                        filter=Q(issue_cycle__issue__state__group="started"),
                    )
                )
                .annotate(
                    unstarted_issues=Count(
                        "issue_cycle__issue__state__group",
                        filter=Q(issue_cycle__issue__state__group="unstarted"),
                    )
                )
                .annotate(
                    backlog_issues=Count(
                        "issue_cycle__issue__state__group",
                        filter=Q(issue_cycle__issue__state__group="backlog"),
                    )
                )
                .order_by("name", "-is_favorite")
            )

            upcoming_cycle = (
                Cycle.objects.filter(
                    workspace__slug=slug,
                    project_id=project_id,
                    start_date__gt=timezone.now(),
                )
                .select_related("project")
                .select_related("workspace")
                .select_related("owned_by")
                .annotate(is_favorite=Exists(subquery))
                .annotate(total_issues=Count("issue_cycle"))
                .annotate(
                    completed_issues=Count(
                        "issue_cycle__issue__state__group",
                        filter=Q(issue_cycle__issue__state__group="completed"),
                    )
                )
                .annotate(
                    cancelled_issues=Count(
                        "issue_cycle__issue__state__group",
                        filter=Q(issue_cycle__issue__state__group="cancelled"),
                    )
                )
                .annotate(
                    started_issues=Count(
                        "issue_cycle__issue__state__group",
                        filter=Q(issue_cycle__issue__state__group="started"),
                    )
                )
                .annotate(
                    unstarted_issues=Count(
                        "issue_cycle__issue__state__group",
                        filter=Q(issue_cycle__issue__state__group="unstarted"),
                    )
                )
                .annotate(
                    backlog_issues=Count(
                        "issue_cycle__issue__state__group",
                        filter=Q(issue_cycle__issue__state__group="backlog"),
                    )
                )
                .order_by("name", "-is_favorite")
            )

            return Response(
                {
                    "current_cycle": CycleSerializer(current_cycle, many=True).data,
                    "upcoming_cycle": CycleSerializer(upcoming_cycle, many=True).data,
                },
                status=status.HTTP_200_OK,
            )

        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )


class CompletedCyclesEndpoint(BaseAPIView):
    permission_classes = [
        ProjectEntityPermission,
    ]

    def get(self, request, slug, project_id):
        try:
            subquery = CycleFavorite.objects.filter(
                user=self.request.user,
                cycle_id=OuterRef("pk"),
                project_id=project_id,
                workspace__slug=slug,
            )
            completed_cycles = (
                Cycle.objects.filter(
                    workspace__slug=slug,
                    project_id=project_id,
                    end_date__lt=timezone.now(),
                )
                .select_related("project")
                .select_related("workspace")
                .select_related("owned_by")
                .annotate(is_favorite=Exists(subquery))
                .annotate(total_issues=Count("issue_cycle"))
                .annotate(
                    completed_issues=Count(
                        "issue_cycle__issue__state__group",
                        filter=Q(issue_cycle__issue__state__group="completed"),
                    )
                )
                .annotate(
                    cancelled_issues=Count(
                        "issue_cycle__issue__state__group",
                        filter=Q(issue_cycle__issue__state__group="cancelled"),
                    )
                )
                .annotate(
                    started_issues=Count(
                        "issue_cycle__issue__state__group",
                        filter=Q(issue_cycle__issue__state__group="started"),
                    )
                )
                .annotate(
                    unstarted_issues=Count(
                        "issue_cycle__issue__state__group",
                        filter=Q(issue_cycle__issue__state__group="unstarted"),
                    )
                )
                .annotate(
                    backlog_issues=Count(
                        "issue_cycle__issue__state__group",
                        filter=Q(issue_cycle__issue__state__group="backlog"),
                    )
                )
                .order_by("name", "-is_favorite")
            )

            return Response(
                {
                    "completed_cycles": CycleSerializer(
                        completed_cycles, many=True
                    ).data,
                },
                status=status.HTTP_200_OK,
            )

        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )


class DraftCyclesEndpoint(BaseAPIView):
    permission_classes = [
        ProjectEntityPermission,
    ]

    def get(self, request, slug, project_id):
        try:
            subquery = CycleFavorite.objects.filter(
                user=self.request.user,
                cycle_id=OuterRef("pk"),
                project_id=project_id,
                workspace__slug=slug,
            )
            draft_cycles = (
                Cycle.objects.filter(
                    workspace__slug=slug,
                    project_id=project_id,
                    end_date=None,
                    start_date=None,
                )
                .select_related("project")
                .select_related("workspace")
                .select_related("owned_by")
                .annotate(is_favorite=Exists(subquery))
                .annotate(total_issues=Count("issue_cycle"))
                .annotate(
                    completed_issues=Count(
                        "issue_cycle__issue__state__group",
                        filter=Q(issue_cycle__issue__state__group="completed"),
                    )
                )
                .annotate(
                    cancelled_issues=Count(
                        "issue_cycle__issue__state__group",
                        filter=Q(issue_cycle__issue__state__group="cancelled"),
                    )
                )
                .annotate(
                    started_issues=Count(
                        "issue_cycle__issue__state__group",
                        filter=Q(issue_cycle__issue__state__group="started"),
                    )
                )
                .annotate(
                    unstarted_issues=Count(
                        "issue_cycle__issue__state__group",
                        filter=Q(issue_cycle__issue__state__group="unstarted"),
                    )
                )
                .annotate(
                    backlog_issues=Count(
                        "issue_cycle__issue__state__group",
                        filter=Q(issue_cycle__issue__state__group="backlog"),
                    )
                )
                .order_by("name", "-is_favorite")
            )

            return Response(
                {"draft_cycles": CycleSerializer(draft_cycles, many=True).data},
                status=status.HTTP_200_OK,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )


class CycleFavoriteViewSet(BaseViewSet):
    permission_classes = [
        ProjectEntityPermission,
    ]

    serializer_class = CycleFavoriteSerializer
    model = CycleFavorite
@@ -794,22 +830,3 @@ class TransferCycleIssueEndpoint(BaseAPIView):
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )


class InCompleteCyclesEndpoint(BaseAPIView):
    def get(self, request, slug, project_id):
        try:
            cycles = Cycle.objects.filter(
                Q(end_date__gte=timezone.now().date()) | Q(end_date__isnull=True),
                workspace__slug=slug,
                project_id=project_id,
            ).select_related("owned_by")

            serializer = CycleSerializer(cycles, many=True)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

@@ -53,11 +53,11 @@ class BulkEstimatePointEndpoint(BaseViewSet):
        try:
            estimates = Estimate.objects.filter(
                workspace__slug=slug, project_id=project_id
            ).prefetch_related("points")
            ).prefetch_related("points").select_related("workspace", "project")
            serializer = EstimateReadSerializer(estimates, many=True)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except Exception as e:
            print(e)
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
@@ -211,7 +211,7 @@ class BulkEstimatePointEndpoint(BaseViewSet):

        try:
            EstimatePoint.objects.bulk_update(
                updated_estimate_points, ["value"], batch_size=10
                updated_estimate_points, ["value"], batch_size=10,
            )
        except IntegrityError as e:
            return Response(

100
apiserver/plane/api/views/exporter.py
Normal file
@@ -0,0 +1,100 @@
# Third Party imports
from rest_framework.response import Response
from rest_framework import status
from sentry_sdk import capture_exception

# Module imports
from . import BaseAPIView
from plane.api.permissions import WorkSpaceAdminPermission
from plane.bgtasks.export_task import issue_export_task
from plane.db.models import Project, ExporterHistory, Workspace

from plane.api.serializers import ExporterHistorySerializer


class ExportIssuesEndpoint(BaseAPIView):
    permission_classes = [
        WorkSpaceAdminPermission,
    ]
    model = ExporterHistory
    serializer_class = ExporterHistorySerializer

    def post(self, request, slug):
        try:
            # Get the workspace
            workspace = Workspace.objects.get(slug=slug)

            provider = request.data.get("provider", False)
            multiple = request.data.get("multiple", False)
            project_ids = request.data.get("project", [])

            if provider in ["csv", "xlsx", "json"]:
                if not project_ids:
                    project_ids = Project.objects.filter(
                        workspace__slug=slug
                    ).values_list("id", flat=True)
                    project_ids = [str(project_id) for project_id in project_ids]

                exporter = ExporterHistory.objects.create(
                    workspace=workspace,
                    project=project_ids,
                    initiated_by=request.user,
                    provider=provider,
                )

                issue_export_task.delay(
                    provider=exporter.provider,
                    workspace_id=workspace.id,
                    project_ids=project_ids,
                    token_id=exporter.token,
                    multiple=multiple,
                    slug=slug,
                )
                return Response(
                    {
                        "message": f"Once the export is ready you will be able to download it"
                    },
                    status=status.HTTP_200_OK,
                )
            else:
                return Response(
                    {"error": f"Provider '{provider}' not found."},
                    status=status.HTTP_400_BAD_REQUEST,
                )
        except Workspace.DoesNotExist:
            return Response(
                {"error": "Workspace does not exist"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

    def get(self, request, slug):
        try:
            exporter_history = ExporterHistory.objects.filter(
                workspace__slug=slug
            ).select_related("workspace", "initiated_by")

            if request.GET.get("per_page", False) and request.GET.get("cursor", False):
                return self.paginate(
                    request=request,
                    queryset=exporter_history,
                    on_results=lambda exporter_history: ExporterHistorySerializer(
                        exporter_history, many=True
                    ).data,
                )
            else:
                return Response(
                    {"error": "per_page and cursor are required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )
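To illustrate the contract of the new export endpoint, a client call might look like this; the URL is a hypothetical mount point, not read from this diff:

# Sketch only: the route below is hypothetical.
import requests

resp = requests.post(
    "https://app.example.com/api/workspaces/my-workspace/export-issues/",
    json={"provider": "csv", "project": [], "multiple": False},
    headers={"Authorization": "Bearer <token>"},
)
# An empty "project" list means every project in the workspace is queued for export.
print(resp.status_code, resp.json())
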
@@ -30,31 +30,6 @@ class GPTIntegrationEndpoint(BaseAPIView):
                status=status.HTTP_400_BAD_REQUEST,
            )

        count = 0

        # If logger is enabled check for request limit
        if settings.LOGGER_BASE_URL:
            try:
                headers = {
                    "Content-Type": "application/json",
                }

                response = requests.post(
                    settings.LOGGER_BASE_URL,
                    json={"user_id": str(request.user.id)},
                    headers=headers,
                )
                count = response.json().get("count", 0)
                if not response.json().get("success", False):
                    return Response(
                        {
                            "error": "You have surpassed the monthly limit for AI assistance"
                        },
                        status=status.HTTP_429_TOO_MANY_REQUESTS,
                    )
            except Exception as e:
                capture_exception(e)

        prompt = request.data.get("prompt", False)
        task = request.data.get("task", False)

@@ -67,7 +42,7 @@ class GPTIntegrationEndpoint(BaseAPIView):

        openai.api_key = settings.OPENAI_API_KEY
        response = openai.Completion.create(
            engine=settings.GPT_ENGINE,
            model=settings.GPT_ENGINE,
            prompt=final_text,
            temperature=0.7,
            max_tokens=1024,
@@ -82,7 +57,6 @@ class GPTIntegrationEndpoint(BaseAPIView):
            {
                "response": text,
                "response_html": text_html,
                "count": count,
                "project_detail": ProjectLiteSerializer(project).data,
                "workspace_detail": WorkspaceLiteSerializer(workspace).data,
            },

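For reference, a minimal sketch of the completion call after the `engine` to `model` rename, assuming the pre-1.0 `openai` Python client that `openai.Completion.create` belongs to (the client version and model name are assumptions):

# Sketch only: assumes openai<1.0, where openai.Completion.create exists.
import openai

openai.api_key = "sk-..."  # placeholder
response = openai.Completion.create(
    model="text-davinci-003",  # hypothetical model name for illustration
    prompt="Summarise this issue in one sentence: ...",
    temperature=0.7,
    max_tokens=1024,
)
text = response.choices[0].text.strip()
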
@@ -7,7 +7,7 @@ from rest_framework.response import Response
from sentry_sdk import capture_exception

# Django imports
from django.db.models import Max
from django.db.models import Max, Q

# Module imports
from plane.api.views import BaseAPIView
@@ -28,6 +28,7 @@ from plane.db.models import (
    Module,
    ModuleLink,
    ModuleIssue,
    Label,
)
from plane.api.serializers import (
    ImporterSerializer,
@@ -41,16 +42,34 @@ from plane.utils.html_processor import strip_tags


class ServiceIssueImportSummaryEndpoint(BaseAPIView):

    def get(self, request, slug, service):
        try:
            if service == "github":
                owner = request.GET.get("owner", False)
                repo = request.GET.get("repo", False)

                if not owner or not repo:
                    return Response(
                        {"error": "Owner and repo are required"},
                        status=status.HTTP_400_BAD_REQUEST,
                    )

                workspace_integration = WorkspaceIntegration.objects.get(
                    integration__provider="github", workspace__slug=slug
                )

                access_tokens_url = workspace_integration.metadata["access_tokens_url"]
                owner = request.GET.get("owner")
                repo = request.GET.get("repo")
                access_tokens_url = workspace_integration.metadata.get(
                    "access_tokens_url", False
                )

                if not access_tokens_url:
                    return Response(
                        {
                            "error": "There was an error during the installation of the GitHub app. To resolve this issue, we recommend reinstalling the GitHub app."
                        },
                        status=status.HTTP_400_BAD_REQUEST,
                    )

                issue_count, labels, collaborators = get_github_repo_details(
                    access_tokens_url, owner, repo
@@ -235,9 +254,22 @@ class ImportServiceEndpoint(BaseAPIView):

    def delete(self, request, slug, service, pk):
        try:
            importer = Importer.objects.filter(
            importer = Importer.objects.get(
                pk=pk, service=service, workspace__slug=slug
            )

            if importer.imported_data is not None:
                # Delete all imported Issues
                imported_issues = importer.imported_data.get("issues", [])
                Issue.issue_objects.filter(id__in=imported_issues).delete()

                # Delete all imported Labels
                imported_labels = importer.imported_data.get("labels", [])
                Label.objects.filter(id__in=imported_labels).delete()

                if importer.service == "jira":
                    imported_modules = importer.imported_data.get("modules", [])
                    Module.objects.filter(id__in=imported_modules).delete()
            importer.delete()
            return Response(status=status.HTTP_204_NO_CONTENT)
        except Exception as e:
@@ -247,6 +279,27 @@ class ImportServiceEndpoint(BaseAPIView):
                status=status.HTTP_400_BAD_REQUEST,
            )

    def patch(self, request, slug, service, pk):
        try:
            importer = Importer.objects.get(
                pk=pk, service=service, workspace__slug=slug
            )
            serializer = ImporterSerializer(importer, data=request.data, partial=True)
            if serializer.is_valid():
                serializer.save()
                return Response(serializer.data, status=status.HTTP_200_OK)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        except Importer.DoesNotExist:
            return Response(
                {"error": "Importer does not exist"}, status=status.HTTP_404_NOT_FOUND
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )


class UpdateServiceImportStatusEndpoint(BaseAPIView):
    def post(self, request, slug, project_id, service, importer_id):
@@ -274,11 +327,13 @@ class BulkImportIssuesEndpoint(BaseAPIView):

            # Get the default state
            default_state = State.objects.filter(
                project_id=project_id, default=True
                ~Q(name="Triage"), project_id=project_id, default=True
            ).first()
            # if there is no default state assign any random state
            if default_state is None:
                default_state = State.objects.filter(project_id=project_id).first()
                default_state = State.objects.filter(
                    ~Q(name="Triage"), project_id=project_id
                ).first()

            # Get the maximum sequence_id
            last_id = IssueSequence.objects.filter(project_id=project_id).aggregate(
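The ~Q(name="Triage") clause keeps the internal Triage state from ever being chosen as a default; a standalone sketch of the negated-Q pattern, condensed into one expression (project_id is a placeholder):

# Sketch only: pick a default state while never selecting the "Triage" state.
from django.db.models import Q

default_state = (
    State.objects.filter(~Q(name="Triage"), project_id=project_id, default=True).first()
    or State.objects.filter(~Q(name="Triage"), project_id=project_id).first()
)
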
@@ -330,6 +385,7 @@ class BulkImportIssuesEndpoint(BaseAPIView):
                        start_date=issue_data.get("start_date", None),
                        target_date=issue_data.get("target_date", None),
                        priority=issue_data.get("priority", None),
                        created_by=request.user,
                    )
                )

@@ -367,7 +423,6 @@ class BulkImportIssuesEndpoint(BaseAPIView):
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                    updated_by=request.user,
                )
                for label_id in labels_list
            ]
@@ -387,7 +442,6 @@ class BulkImportIssuesEndpoint(BaseAPIView):
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                    updated_by=request.user,
                )
                for assignee_id in assignees_list
            ]
@@ -404,8 +458,9 @@ class BulkImportIssuesEndpoint(BaseAPIView):
                    actor=request.user,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    comment=f"{request.user.email} importer the issue from {service}",
                    comment=f"imported the issue from {service}",
                    verb="created",
                    created_by=request.user,
                )
                for issue in issues
            ],
@@ -424,7 +479,6 @@ class BulkImportIssuesEndpoint(BaseAPIView):
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                    updated_by=request.user,
                )
                for comment in comments_list
            ]
@@ -441,7 +495,6 @@ class BulkImportIssuesEndpoint(BaseAPIView):
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                    updated_by=request.user,
                )
                for issue, issue_data in zip(issues, issues_data)
            ]
@@ -479,7 +532,6 @@ class BulkImportModulesEndpoint(BaseAPIView):
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                    updated_by=request.user,
                )
                for module in modules_data
            ],
@@ -487,48 +539,57 @@ class BulkImportModulesEndpoint(BaseAPIView):
                ignore_conflicts=True,
            )

            _ = ModuleLink.objects.bulk_create(
                [
                    ModuleLink(
                        module=module,
                        url=module_data.get("link", {}).get("url", "https://plane.so"),
                        title=module_data.get("link", {}).get(
                            "title", "Original Issue"
                        ),
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                        created_by=request.user,
                        updated_by=request.user,
                    )
                    for module, module_data in zip(modules, modules_data)
                ],
                batch_size=100,
                ignore_conflicts=True,
            )
            modules = Module.objects.filter(id__in=[module.id for module in modules])

            bulk_module_issues = []
            for module, module_data in zip(modules, modules_data):
                module_issues_list = module_data.get("module_issues_list", [])
                bulk_module_issues = bulk_module_issues + [
                    ModuleIssue(
                        issue_id=issue,
                        module=module,
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                        created_by=request.user,
                        updated_by=request.user,
                    )
                    for issue in module_issues_list
                ]
            if len(modules) == len(modules_data):
                _ = ModuleLink.objects.bulk_create(
                    [
                        ModuleLink(
                            module=module,
                            url=module_data.get("link", {}).get(
                                "url", "https://plane.so"
                            ),
                            title=module_data.get("link", {}).get(
                                "title", "Original Issue"
                            ),
                            project_id=project_id,
                            workspace_id=project.workspace_id,
                            created_by=request.user,
                        )
                        for module, module_data in zip(modules, modules_data)
                    ],
                    batch_size=100,
                    ignore_conflicts=True,
                )

                _ = ModuleIssue.objects.bulk_create(
                    bulk_module_issues, batch_size=100, ignore_conflicts=True
                )
                bulk_module_issues = []
                for module, module_data in zip(modules, modules_data):
                    module_issues_list = module_data.get("module_issues_list", [])
                    bulk_module_issues = bulk_module_issues + [
                        ModuleIssue(
                            issue_id=issue,
                            module=module,
                            project_id=project_id,
                            workspace_id=project.workspace_id,
                            created_by=request.user,
                        )
                        for issue in module_issues_list
                    ]

                serializer = ModuleSerializer(modules, many=True)
                return Response(
                    {"modules": serializer.data}, status=status.HTTP_201_CREATED
                )
                _ = ModuleIssue.objects.bulk_create(
                    bulk_module_issues, batch_size=100, ignore_conflicts=True
                )

                serializer = ModuleSerializer(modules, many=True)
                return Response(
                    {"modules": serializer.data}, status=status.HTTP_201_CREATED
                )

            else:
                return Response(
                    {"message": "Modules created but issues could not be imported"},
                    status=status.HTTP_200_OK,
                )
        except Project.DoesNotExist:
            return Response(
                {"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND

645
apiserver/plane/api/views/inbox.py
Normal file
@@ -0,0 +1,645 @@
# Python imports
import json

# Django import
from django.utils import timezone
from django.db.models import Q, Count, OuterRef, Func, F, Prefetch
from django.core.serializers.json import DjangoJSONEncoder

# Third party imports
from rest_framework import status
from rest_framework.response import Response
from sentry_sdk import capture_exception

# Module imports
from .base import BaseViewSet
from plane.api.permissions import ProjectBasePermission, ProjectLitePermission
from plane.db.models import (
    Inbox,
    InboxIssue,
    Issue,
    State,
    IssueLink,
    IssueAttachment,
    ProjectMember,
    ProjectDeployBoard,
)
from plane.api.serializers import (
    IssueSerializer,
    InboxSerializer,
    InboxIssueSerializer,
    IssueCreateSerializer,
    IssueStateInboxSerializer,
)
from plane.utils.issue_filters import issue_filters
from plane.bgtasks.issue_activites_task import issue_activity


class InboxViewSet(BaseViewSet):
    permission_classes = [
        ProjectBasePermission,
    ]

    serializer_class = InboxSerializer
    model = Inbox

    def get_queryset(self):
        return (
            super()
            .get_queryset()
            .filter(
                workspace__slug=self.kwargs.get("slug"),
                project_id=self.kwargs.get("project_id"),
            )
            .annotate(
                pending_issue_count=Count(
                    "issue_inbox",
                    filter=Q(issue_inbox__status=-2),
                )
            )
            .select_related("workspace", "project")
        )

    def perform_create(self, serializer):
        serializer.save(project_id=self.kwargs.get("project_id"))

    def destroy(self, request, slug, project_id, pk):
        try:
            inbox = Inbox.objects.get(
                workspace__slug=slug, project_id=project_id, pk=pk
            )
            # Handle default inbox delete
            if inbox.is_default:
                return Response(
                    {"error": "You cannot delete the default inbox"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            inbox.delete()
            return Response(status=status.HTTP_204_NO_CONTENT)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

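The integer status codes threaded through this file are implicit; from the filters and comments in this new module, a reference mapping (inferred, not declared anywhere in the diff) might be:

# Sketch only: inferred from this file's queryset filters and comments.
INBOX_STATUS = {
    -2: "pending",    # pending_issue_count counts issue_inbox__status=-2
    -1: "rejected",   # rejected issues are moved to a "cancelled" state
    1: "accepted",    # accepted issues move to the project default state
    2: "duplicate",   # duplicates are also moved to "cancelled"
}
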
class InboxIssueViewSet(BaseViewSet):
|
||||
permission_classes = [
|
||||
ProjectLitePermission,
|
||||
]
|
||||
|
||||
serializer_class = InboxIssueSerializer
|
||||
model = InboxIssue
|
||||
|
||||
filterset_fields = [
|
||||
"status",
|
||||
]
|
||||
|
||||
def get_queryset(self):
|
||||
return self.filter_queryset(
|
||||
super()
|
||||
.get_queryset()
|
||||
.filter(
|
||||
Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
project_id=self.kwargs.get("project_id"),
|
||||
inbox_id=self.kwargs.get("inbox_id"),
|
||||
)
|
||||
.select_related("issue", "workspace", "project")
|
||||
)
|
||||
|
||||
def list(self, request, slug, project_id, inbox_id):
|
||||
try:
|
||||
filters = issue_filters(request.query_params, "GET")
|
||||
issues = (
|
||||
Issue.objects.filter(
|
||||
issue_inbox__inbox_id=inbox_id,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
.filter(**filters)
|
||||
.annotate(bridge_id=F("issue_inbox__id"))
|
||||
.select_related("workspace", "project", "state", "parent")
|
||||
.prefetch_related("assignees", "labels")
|
||||
.order_by("issue_inbox__snoozed_till", "issue_inbox__status")
|
||||
.annotate(
|
||||
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
attachment_count=IssueAttachment.objects.filter(
|
||||
issue=OuterRef("id")
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"issue_inbox",
|
||||
queryset=InboxIssue.objects.only(
|
||||
"status", "duplicate_to", "snoozed_till", "source"
|
||||
),
|
||||
)
|
||||
)
|
||||
)
|
||||
issues_data = IssueStateInboxSerializer(issues, many=True).data
|
||||
return Response(
|
||||
issues_data,
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
def create(self, request, slug, project_id, inbox_id):
|
||||
try:
|
||||
if not request.data.get("issue", {}).get("name", False):
|
||||
return Response(
|
||||
{"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
# Check for valid priority
|
||||
if not request.data.get("issue", {}).get("priority", None) in [
|
||||
"low",
|
||||
"medium",
|
||||
"high",
|
||||
"urgent",
|
||||
None,
|
||||
]:
|
||||
return Response(
|
||||
{"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
# Create or get state
|
||||
state, _ = State.objects.get_or_create(
|
||||
name="Triage",
|
||||
group="backlog",
|
||||
description="Default state for managing all Inbox Issues",
|
||||
project_id=project_id,
|
||||
color="#ff7700",
|
||||
)
|
||||
|
||||
# create an issue
|
||||
issue = Issue.objects.create(
|
||||
name=request.data.get("issue", {}).get("name"),
|
||||
description=request.data.get("issue", {}).get("description", {}),
|
||||
description_html=request.data.get("issue", {}).get(
|
||||
"description_html", "<p></p>"
|
||||
),
|
||||
priority=request.data.get("issue", {}).get("priority", "low"),
|
||||
project_id=project_id,
|
||||
state=state,
|
||||
)
|
||||
|
||||
# Create an Issue Activity
|
||||
issue_activity.delay(
|
||||
type="issue.activity.created",
|
||||
requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=str(issue.id),
|
||||
project_id=str(project_id),
|
||||
current_instance=None,
|
||||
)
|
||||
# create an inbox issue
|
||||
InboxIssue.objects.create(
|
||||
inbox_id=inbox_id,
|
||||
project_id=project_id,
|
||||
issue=issue,
|
||||
source=request.data.get("source", "in-app"),
|
||||
)
|
||||
|
||||
serializer = IssueStateInboxSerializer(issue)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
    def partial_update(self, request, slug, project_id, inbox_id, pk):
        try:
            inbox_issue = InboxIssue.objects.get(
                pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
            )
            # Get the project member
            project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user)
            # Only project admins/members, or the user who created the inbox issue, can edit it
            if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id):
                return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST)

            # Get issue data
            issue_data = request.data.pop("issue", False)

            if bool(issue_data):
                issue = Issue.objects.get(
                    pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
                )
                # Only allow guests and viewers to edit name and description
                if project_member.role <= 10:
                    # Restrict the editable fields for viewers and guests
                    issue_data = {
                        "name": issue_data.get("name", issue.name),
                        "description_html": issue_data.get("description_html", issue.description_html),
                        "description": issue_data.get("description", issue.description)
                    }

                issue_serializer = IssueCreateSerializer(
                    issue, data=issue_data, partial=True
                )

                if issue_serializer.is_valid():
                    current_instance = issue
                    # Log all the updates
                    requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
                    if issue is not None:
                        issue_activity.delay(
                            type="issue.activity.updated",
                            requested_data=requested_data,
                            actor_id=str(request.user.id),
                            issue_id=str(issue.id),
                            project_id=str(project_id),
                            current_instance=json.dumps(
                                IssueSerializer(current_instance).data,
                                cls=DjangoJSONEncoder,
                            ),
                        )
                    issue_serializer.save()
                else:
                    return Response(
                        issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST
                    )

            # Only project admins and members can edit inbox issue attributes
            if project_member.role > 10:
                serializer = InboxIssueSerializer(
                    inbox_issue, data=request.data, partial=True
                )

                if serializer.is_valid():
                    serializer.save()
                    # Update the issue state if the issue is rejected or marked as duplicate
                    if serializer.data["status"] in [-1, 2]:
                        issue = Issue.objects.get(
                            pk=inbox_issue.issue_id,
                            workspace__slug=slug,
                            project_id=project_id,
                        )
                        state = State.objects.filter(
                            group="cancelled", workspace__slug=slug, project_id=project_id
                        ).first()
                        if state is not None:
                            issue.state = state
                            issue.save()

                    # Update the issue state if it is accepted
                    if serializer.data["status"] in [1]:
                        issue = Issue.objects.get(
                            pk=inbox_issue.issue_id,
                            workspace__slug=slug,
                            project_id=project_id,
                        )

                        # Update the issue state only if it is in triage state
                        if issue.state.name == "Triage":
                            # Move to default state
                            state = State.objects.filter(
                                workspace__slug=slug, project_id=project_id, default=True
                            ).first()
                            if state is not None:
                                issue.state = state
                                issue.save()

                    return Response(serializer.data, status=status.HTTP_200_OK)
                return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
            else:
                return Response(InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK)
        except InboxIssue.DoesNotExist:
            return Response(
                {"error": "Inbox Issue does not exist"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

    def retrieve(self, request, slug, project_id, inbox_id, pk):
        try:
            inbox_issue = InboxIssue.objects.get(
                pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
            )
            issue = Issue.objects.get(
                pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
            )
            serializer = IssueStateInboxSerializer(issue)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

    def destroy(self, request, slug, project_id, inbox_id, pk):
        try:
            inbox_issue = InboxIssue.objects.get(
                pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
            )
            # Get the project member
            project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user)

            if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id):
                return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST)

            inbox_issue.delete()
            return Response(status=status.HTTP_204_NO_CONTENT)
        except InboxIssue.DoesNotExist:
            return Response({"error": "Inbox Issue does not exist"}, status=status.HTTP_400_BAD_REQUEST)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )


class InboxIssuePublicViewSet(BaseViewSet):
    serializer_class = InboxIssueSerializer
    model = InboxIssue

    filterset_fields = [
        "status",
    ]

    def get_queryset(self):
        project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=self.kwargs.get("slug"), project_id=self.kwargs.get("project_id"))
        if project_deploy_board is not None:
            return self.filter_queryset(
                super()
                .get_queryset()
                .filter(
                    Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
                    project_id=self.kwargs.get("project_id"),
                    workspace__slug=self.kwargs.get("slug"),
                    inbox_id=self.kwargs.get("inbox_id"),
                )
                .select_related("issue", "workspace", "project")
            )
        else:
            return InboxIssue.objects.none()

    def list(self, request, slug, project_id, inbox_id):
        try:
            project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
            if project_deploy_board.inbox is None:
                return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)

            filters = issue_filters(request.query_params, "GET")
            issues = (
                Issue.objects.filter(
                    issue_inbox__inbox_id=inbox_id,
                    workspace__slug=slug,
                    project_id=project_id,
                )
                .filter(**filters)
                .annotate(bridge_id=F("issue_inbox__id"))
                .select_related("workspace", "project", "state", "parent")
                .prefetch_related("assignees", "labels")
                .order_by("issue_inbox__snoozed_till", "issue_inbox__status")
                .annotate(
                    sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
                    .order_by()
                    .annotate(count=Func(F("id"), function="Count"))
                    .values("count")
                )
                .annotate(
                    link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                    .order_by()
                    .annotate(count=Func(F("id"), function="Count"))
                    .values("count")
                )
                .annotate(
                    attachment_count=IssueAttachment.objects.filter(
                        issue=OuterRef("id")
                    )
                    .order_by()
                    .annotate(count=Func(F("id"), function="Count"))
                    .values("count")
                )
                .prefetch_related(
                    Prefetch(
                        "issue_inbox",
                        queryset=InboxIssue.objects.only(
                            "status", "duplicate_to", "snoozed_till", "source"
                        ),
                    )
                )
            )
            issues_data = IssueStateInboxSerializer(issues, many=True).data
            return Response(
                issues_data,
                status=status.HTTP_200_OK,
            )
        except ProjectDeployBoard.DoesNotExist:
            return Response({"error": "Project Deploy Board does not exist"}, status=status.HTTP_400_BAD_REQUEST)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

    def create(self, request, slug, project_id, inbox_id):
        try:
            project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
            if project_deploy_board.inbox is None:
                return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)

            if not request.data.get("issue", {}).get("name", False):
                return Response(
                    {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
                )

            # Check for valid priority
            if not request.data.get("issue", {}).get("priority", None) in [
                "low",
                "medium",
                "high",
                "urgent",
                None,
            ]:
                return Response(
                    {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
                )

            # Create or get state
            state, _ = State.objects.get_or_create(
                name="Triage",
                group="backlog",
                description="Default state for managing all Inbox Issues",
                project_id=project_id,
                color="#ff7700",
            )

            # create an issue
            issue = Issue.objects.create(
                name=request.data.get("issue", {}).get("name"),
                description=request.data.get("issue", {}).get("description", {}),
                description_html=request.data.get("issue", {}).get(
                    "description_html", "<p></p>"
                ),
                priority=request.data.get("issue", {}).get("priority", "low"),
                project_id=project_id,
                state=state,
            )

            # Create an Issue Activity
            issue_activity.delay(
                type="issue.activity.created",
                requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
                actor_id=str(request.user.id),
                issue_id=str(issue.id),
                project_id=str(project_id),
                current_instance=None,
            )
            # create an inbox issue
            InboxIssue.objects.create(
                inbox_id=inbox_id,
                project_id=project_id,
                issue=issue,
                source=request.data.get("source", "in-app"),
            )

            serializer = IssueStateInboxSerializer(issue)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

    def partial_update(self, request, slug, project_id, inbox_id, pk):
        try:
            project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
            if project_deploy_board.inbox is None:
                return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)

            inbox_issue = InboxIssue.objects.get(
                pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
            )
            # Only the user who created the inbox issue can edit it
            if str(inbox_issue.created_by_id) != str(request.user.id):
                return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST)

            # Get issue data
            issue_data = request.data.pop("issue", False)

            issue = Issue.objects.get(
                pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
            )
            # Public users may only edit the name and description fields
            issue_data = {
                "name": issue_data.get("name", issue.name),
                "description_html": issue_data.get("description_html", issue.description_html),
                "description": issue_data.get("description", issue.description)
            }

            issue_serializer = IssueCreateSerializer(
                issue, data=issue_data, partial=True
            )

            if issue_serializer.is_valid():
                current_instance = issue
                # Log all the updates
                requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
                if issue is not None:
                    issue_activity.delay(
                        type="issue.activity.updated",
                        requested_data=requested_data,
                        actor_id=str(request.user.id),
                        issue_id=str(issue.id),
                        project_id=str(project_id),
                        current_instance=json.dumps(
                            IssueSerializer(current_instance).data,
                            cls=DjangoJSONEncoder,
                        ),
                    )
                issue_serializer.save()
                return Response(issue_serializer.data, status=status.HTTP_200_OK)
            return Response(issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        except InboxIssue.DoesNotExist:
            return Response(
                {"error": "Inbox Issue does not exist"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

    def retrieve(self, request, slug, project_id, inbox_id, pk):
        try:
            project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
            if project_deploy_board.inbox is None:
                return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)

            inbox_issue = InboxIssue.objects.get(
                pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
            )
            issue = Issue.objects.get(
                pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
            )
            serializer = IssueStateInboxSerializer(issue)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

    def destroy(self, request, slug, project_id, inbox_id, pk):
        try:
            project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
            if project_deploy_board.inbox is None:
                return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)

            inbox_issue = InboxIssue.objects.get(
                pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
            )

            # Only the user who created the inbox issue can delete it
            if str(inbox_issue.created_by_id) != str(request.user.id):
                return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST)

            inbox_issue.delete()
            return Response(status=status.HTTP_204_NO_CONTENT)
        except InboxIssue.DoesNotExist:
            return Response({"error": "Inbox Issue does not exist"}, status=status.HTTP_400_BAD_REQUEST)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

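For orientation, here is a minimal client-side sketch of the public inbox-issue creation flow handled above. The host, auth header, and URL pattern are assumptions for illustration; only the payload keys and the priority whitelist come from the view code itself.

import requests

BASE_URL = "https://plane.example.com/api"  # hypothetical deployment
HEADERS = {"Authorization": "Bearer <token>"}  # hypothetical auth scheme

payload = {
    "issue": {
        "name": "Crash when opening settings",  # required by the view
        "description_html": "<p>Steps to reproduce...</p>",
        "priority": "high",  # must be low/medium/high/urgent, or omitted
    },
    "source": "in-app",  # defaults to "in-app" when omitted
}

# Hypothetical route; the real URL pattern is not shown in this diff.
resp = requests.post(
    f"{BASE_URL}/workspaces/<slug>/projects/<project_id>/inboxes/<inbox_id>/inbox-issues/",
    json=payload,
    headers=HEADERS,
)
print(resp.status_code, resp.json())
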
@@ -6,3 +6,4 @@ from .github import (
    GithubCommentSyncViewSet,
    GithubRepositoriesEndpoint,
)
from .slack import SlackProjectSyncViewSet

@@ -27,6 +27,7 @@ from plane.utils.integrations.github import (
)
from plane.api.permissions import WorkSpaceAdminPermission


class IntegrationViewSet(BaseViewSet):
    serializer_class = IntegrationSerializer
    model = Integration

@@ -101,7 +102,6 @@ class WorkspaceIntegrationViewSet(BaseViewSet):
        WorkSpaceAdminPermission,
    ]


    def get_queryset(self):
        return (
            super()

@@ -112,21 +112,30 @@ class WorkspaceIntegrationViewSet(BaseViewSet):

    def create(self, request, slug, provider):
        try:
            installation_id = request.data.get("installation_id", None)

            if not installation_id:
                return Response(
                    {"error": "Installation ID is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            workspace = Workspace.objects.get(slug=slug)
            integration = Integration.objects.get(provider=provider)
            config = {}
            if provider == "github":
                installation_id = request.data.get("installation_id", None)
                if not installation_id:
                    return Response(
                        {"error": "Installation ID is required"},
                        status=status.HTTP_400_BAD_REQUEST,
                    )
                metadata = get_github_metadata(installation_id)
                config = {"installation_id": installation_id}

            if provider == "slack":
                metadata = request.data.get("metadata", {})
                access_token = metadata.get("access_token", False)
                team_id = metadata.get("team", {}).get("id", False)
                if not metadata or not access_token or not team_id:
                    return Response(
                        {"error": "Access token and team ID are required"},
                        status=status.HTTP_400_BAD_REQUEST,
                    )
                config = {"team_id": team_id, "access_token": access_token}

            # Create a bot user
            bot_user = User.objects.create(
                email=f"{uuid.uuid4().hex}@plane.so",

apiserver/plane/api/views/integration/slack.py (new file, 73 lines)
@@ -0,0 +1,73 @@
|
||||
# Django import
|
||||
from django.db import IntegrityError
|
||||
|
||||
# Third party imports
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
from sentry_sdk import capture_exception
|
||||
|
||||
# Module imports
|
||||
from plane.api.views import BaseViewSet, BaseAPIView
|
||||
from plane.db.models import SlackProjectSync, WorkspaceIntegration, ProjectMember
|
||||
from plane.api.serializers import SlackProjectSyncSerializer
|
||||
from plane.api.permissions import ProjectBasePermission, ProjectEntityPermission
|
||||
|
||||
|
||||
class SlackProjectSyncViewSet(BaseViewSet):
|
||||
permission_classes = [
|
||||
ProjectBasePermission,
|
||||
]
|
||||
serializer_class = SlackProjectSyncSerializer
|
||||
model = SlackProjectSync
|
||||
|
||||
def get_queryset(self):
|
||||
return (
|
||||
super()
|
||||
.get_queryset()
|
||||
.filter(
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
project_id=self.kwargs.get("project_id"),
|
||||
)
|
||||
.filter(project__project_projectmember__member=self.request.user)
|
||||
)
|
||||
|
||||
def create(self, request, slug, project_id, workspace_integration_id):
|
||||
try:
|
||||
serializer = SlackProjectSyncSerializer(data=request.data)
|
||||
|
||||
workspace_integration = WorkspaceIntegration.objects.get(
|
||||
workspace__slug=slug, pk=workspace_integration_id
|
||||
)
|
||||
|
||||
if serializer.is_valid():
|
||||
serializer.save(
|
||||
project_id=project_id,
|
||||
workspace_integration_id=workspace_integration_id,
|
||||
)
|
||||
|
||||
workspace_integration = WorkspaceIntegration.objects.get(
|
||||
pk=workspace_integration_id, workspace__slug=slug
|
||||
)
|
||||
|
||||
_ = ProjectMember.objects.get_or_create(
|
||||
member=workspace_integration.actor, role=20, project_id=project_id
|
||||
)
|
||||
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
except IntegrityError:
|
||||
return Response(
|
||||
{"error": "Slack is already enabled for the project"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
except WorkspaceIntegration.DoesNotExist:
|
||||
return Response(
|
||||
{"error": "Workspace Integration does not exist"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
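The hunks above do not show how the new viewset is routed. A plausible registration, with the path and route name invented for illustration, could look like this:

from django.urls import path

from plane.api.views import SlackProjectSyncViewSet

urlpatterns = [
    # Hypothetical route; the real pattern in the repo may differ.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/"
        "workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/",
        SlackProjectSyncViewSet.as_view({"get": "list", "post": "create"}),
        name="slack-project-sync",
    ),
]
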
(File diff suppressed because it is too large.)
@@ -37,7 +37,7 @@ from plane.db.models import (
from plane.bgtasks.issue_activites_task import issue_activity
from plane.utils.grouper import group_results
from plane.utils.issue_filters import issue_filters

from plane.utils.analytics_plot import burndown_plot

class ModuleViewSet(BaseViewSet):
    model = Module

@@ -53,6 +53,8 @@ class ModuleViewSet(BaseViewSet):
    )

    def get_queryset(self):
        order_by = self.request.GET.get("order_by", "sort_order")

        subquery = ModuleFavorite.objects.filter(
            user=self.request.user,
            module_id=OuterRef("pk"),

@@ -106,9 +108,31 @@ class ModuleViewSet(BaseViewSet):
                    filter=Q(issue_module__issue__state__group="backlog"),
                )
            )
            .order_by("-is_favorite", "name")
            .order_by(order_by, "name")
        )

    def perform_destroy(self, instance):
        module_issues = list(
            ModuleIssue.objects.filter(module_id=self.kwargs.get("pk")).values_list(
                "issue", flat=True
            )
        )
        issue_activity.delay(
            type="module.activity.deleted",
            requested_data=json.dumps(
                {
                    "module_id": str(self.kwargs.get("pk")),
                    "issues": [str(issue_id) for issue_id in module_issues],
                }
            ),
            actor_id=str(self.request.user.id),
            issue_id=str(self.kwargs.get("pk", None)),
            project_id=str(self.kwargs.get("project_id", None)),
            current_instance=None,
        )

        return super().perform_destroy(instance)

    def create(self, request, slug, project_id):
        try:
            project = Project.objects.get(workspace__slug=slug, pk=project_id)
@@ -138,6 +162,88 @@ class ModuleViewSet(BaseViewSet):
                status=status.HTTP_400_BAD_REQUEST,
            )

    def retrieve(self, request, slug, project_id, pk):
        try:
            queryset = self.get_queryset().get(pk=pk)

            assignee_distribution = (
                Issue.objects.filter(
                    issue_module__module_id=pk,
                    workspace__slug=slug,
                    project_id=project_id,
                )
                .annotate(first_name=F("assignees__first_name"))
                .annotate(last_name=F("assignees__last_name"))
                .annotate(assignee_id=F("assignees__id"))
                .annotate(display_name=F("assignees__display_name"))
                .annotate(avatar=F("assignees__avatar"))
                .values("first_name", "last_name", "assignee_id", "avatar", "display_name")
                .annotate(total_issues=Count("assignee_id"))
                .annotate(
                    completed_issues=Count(
                        "assignee_id",
                        filter=Q(completed_at__isnull=False),
                    )
                )
                .annotate(
                    pending_issues=Count(
                        "assignee_id",
                        filter=Q(completed_at__isnull=True),
                    )
                )
                .order_by("first_name", "last_name")
            )

            label_distribution = (
                Issue.objects.filter(
                    issue_module__module_id=pk,
                    workspace__slug=slug,
                    project_id=project_id,
                )
                .annotate(label_name=F("labels__name"))
                .annotate(color=F("labels__color"))
                .annotate(label_id=F("labels__id"))
                .values("label_name", "color", "label_id")
                .annotate(total_issues=Count("label_id"))
                .annotate(
                    completed_issues=Count(
                        "label_id",
                        filter=Q(completed_at__isnull=False),
                    )
                )
                .annotate(
                    pending_issues=Count(
                        "label_id",
                        filter=Q(completed_at__isnull=True),
                    )
                )
                .order_by("label_name")
            )

            data = ModuleSerializer(queryset).data
            data["distribution"] = {
                "assignees": assignee_distribution,
                "labels": label_distribution,
                "completion_chart": {},
            }

            if queryset.start_date and queryset.target_date:
                data["distribution"]["completion_chart"] = burndown_plot(
                    queryset=queryset, slug=slug, project_id=project_id, module_id=pk
                )

            return Response(
                data,
                status=status.HTTP_200_OK,
            )

        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )


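For reference, an illustrative shape of the payload the retrieve method above assembles; the field values are invented and serializer-only fields are omitted.

example_payload = {
    # ...fields from ModuleSerializer...
    "distribution": {
        "assignees": [
            {
                "first_name": "Ada",
                "last_name": "Lovelace",
                "assignee_id": "<uuid>",
                "avatar": "",
                "display_name": "ada",
                "total_issues": 8,
                "completed_issues": 5,
                "pending_issues": 3,
            },
        ],
        "labels": [
            {
                "label_name": "bug",
                "color": "#EF4444",
                "label_id": "<uuid>",
                "total_issues": 4,
                "completed_issues": 2,
                "pending_issues": 2,
            },
        ],
        # Populated by burndown_plot only when both start_date and
        # target_date are set on the module.
        "completion_chart": {},
    },
}
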
class ModuleIssueViewSet(BaseViewSet):
    serializer_class = ModuleIssueSerializer

@@ -158,12 +264,28 @@ class ModuleIssueViewSet(BaseViewSet):
            module_id=self.kwargs.get("module_id"),
        )

    def perform_destroy(self, instance):
        issue_activity.delay(
            type="module.activity.deleted",
            requested_data=json.dumps(
                {
                    "module_id": str(self.kwargs.get("module_id")),
                    "issues": [str(instance.issue_id)],
                }
            ),
            actor_id=str(self.request.user.id),
            issue_id=str(self.kwargs.get("pk", None)),
            project_id=str(self.kwargs.get("project_id", None)),
            current_instance=None,
        )
        return super().perform_destroy(instance)

    def get_queryset(self):
        return self.filter_queryset(
            super()
            .get_queryset()
            .annotate(
                sub_issues_count=Issue.objects.filter(parent=OuterRef("issue"))
                sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")

@@ -188,9 +310,9 @@ class ModuleIssueViewSet(BaseViewSet):
        group_by = request.GET.get("group_by", False)
        filters = issue_filters(request.query_params, "GET")
        issues = (
            Issue.objects.filter(issue_module__module_id=module_id)
            Issue.issue_objects.filter(issue_module__module_id=module_id)
            .annotate(
                sub_issues_count=Issue.objects.filter(parent=OuterRef("id"))
                sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")

@@ -302,7 +424,7 @@ class ModuleIssueViewSet(BaseViewSet):

        # Capture Issue Activity
        issue_activity.delay(
            type="issue.activity.updated",
            type="module.activity.created",
            requested_data=json.dumps({"modules_list": issues}),
            actor_id=str(self.request.user.id),
            issue_id=str(self.kwargs.get("pk", None)),

@@ -361,9 +483,6 @@ class ModuleLinkViewSet(BaseViewSet):


class ModuleFavoriteViewSet(BaseViewSet):
    permission_classes = [
        ProjectEntityPermission,
    ]

    serializer_class = ModuleFavoriteSerializer
    model = ModuleFavorite

apiserver/plane/api/views/notification.py (new file, 276 lines)
@@ -0,0 +1,276 @@
|
||||
# Django imports
|
||||
from django.db.models import Q
|
||||
from django.utils import timezone
|
||||
|
||||
# Third party imports
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
from sentry_sdk import capture_exception
|
||||
from plane.utils.paginator import BasePaginator
|
||||
|
||||
# Module imports
|
||||
from .base import BaseViewSet, BaseAPIView
|
||||
from plane.db.models import Notification, IssueAssignee, IssueSubscriber, Issue, WorkspaceMember
|
||||
from plane.api.serializers import NotificationSerializer
|
||||
|
||||
|
||||
class NotificationViewSet(BaseViewSet, BasePaginator):
|
||||
model = Notification
|
||||
serializer_class = NotificationSerializer
|
||||
|
||||
def get_queryset(self):
|
||||
return (
|
||||
super()
|
||||
.get_queryset()
|
||||
.filter(
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
receiver_id=self.request.user.id,
|
||||
)
|
||||
.select_related("workspace", "project," "triggered_by", "receiver")
|
||||
)
|
||||
|
||||
    def list(self, request, slug):
        try:
            snoozed = request.GET.get("snoozed", "false")
            archived = request.GET.get("archived", "false")
            read = request.GET.get("read", "true")

            # Filter type
            type = request.GET.get("type", "all")

            notifications = (
                Notification.objects.filter(
                    workspace__slug=slug, receiver_id=request.user.id
                )
                .select_related("workspace", "project", "triggered_by", "receiver")
                .order_by("snoozed_till", "-created_at")
            )

            # Filter for snoozed notifications
            if snoozed == "false":
                notifications = notifications.filter(
                    Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
                )

            if snoozed == "true":
                notifications = notifications.filter(
                    Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False)
                )

            if read == "false":
                notifications = notifications.filter(read_at__isnull=True)

            # Filter for archived or unarchived notifications
            if archived == "false":
                notifications = notifications.filter(archived_at__isnull=True)

            if archived == "true":
                notifications = notifications.filter(archived_at__isnull=False)

            # Subscribed issues
            if type == "watching":
                issue_ids = IssueSubscriber.objects.filter(
                    workspace__slug=slug, subscriber_id=request.user.id
                ).values_list("issue_id", flat=True)
                notifications = notifications.filter(entity_identifier__in=issue_ids)

            # Assigned Issues
            if type == "assigned":
                issue_ids = IssueAssignee.objects.filter(
                    workspace__slug=slug, assignee_id=request.user.id
                ).values_list("issue_id", flat=True)
                notifications = notifications.filter(entity_identifier__in=issue_ids)

            # Created issues
            if type == "created":
                if WorkspaceMember.objects.filter(workspace__slug=slug, member=request.user, role__lt=15).exists():
                    notifications = Notification.objects.none()
                else:
                    issue_ids = Issue.objects.filter(
                        workspace__slug=slug, created_by=request.user
                    ).values_list("pk", flat=True)
                    notifications = notifications.filter(entity_identifier__in=issue_ids)

            # Pagination
            if request.GET.get("per_page", False) and request.GET.get("cursor", False):
                return self.paginate(
                    request=request,
                    queryset=(notifications),
                    on_results=lambda notifications: NotificationSerializer(
                        notifications, many=True
                    ).data,
                )

            serializer = NotificationSerializer(notifications, many=True)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

    def partial_update(self, request, slug, pk):
        try:
            notification = Notification.objects.get(
                workspace__slug=slug, pk=pk, receiver=request.user
            )
            # Only read_at and snoozed_till can be updated
            notification_data = {
                "snoozed_till": request.data.get("snoozed_till", None),
            }
            serializer = NotificationSerializer(
                notification, data=notification_data, partial=True
            )

            if serializer.is_valid():
                serializer.save()
                return Response(serializer.data, status=status.HTTP_200_OK)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        except Notification.DoesNotExist:
            return Response(
                {"error": "Notification does not exist"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

    def mark_read(self, request, slug, pk):
        try:
            notification = Notification.objects.get(
                receiver=request.user, workspace__slug=slug, pk=pk
            )
            notification.read_at = timezone.now()
            notification.save()
            serializer = NotificationSerializer(notification)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except Notification.DoesNotExist:
            return Response(
                {"error": "Notification does not exist"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

    def mark_unread(self, request, slug, pk):
        try:
            notification = Notification.objects.get(
                receiver=request.user, workspace__slug=slug, pk=pk
            )
            notification.read_at = None
            notification.save()
            serializer = NotificationSerializer(notification)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except Notification.DoesNotExist:
            return Response(
                {"error": "Notification does not exist"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

    def archive(self, request, slug, pk):
        try:
            notification = Notification.objects.get(
                receiver=request.user, workspace__slug=slug, pk=pk
            )
            notification.archived_at = timezone.now()
            notification.save()
            serializer = NotificationSerializer(notification)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except Notification.DoesNotExist:
            return Response(
                {"error": "Notification does not exist"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

    def unarchive(self, request, slug, pk):
        try:
            notification = Notification.objects.get(
                receiver=request.user, workspace__slug=slug, pk=pk
            )
            notification.archived_at = None
            notification.save()
            serializer = NotificationSerializer(notification)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except Notification.DoesNotExist:
            return Response(
                {"error": "Notification does not exist"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )


class UnreadNotificationEndpoint(BaseAPIView):
    def get(self, request, slug):
        try:
            # Watching Issues Count
            watching_issues_count = Notification.objects.filter(
                workspace__slug=slug,
                receiver_id=request.user.id,
                read_at__isnull=True,
                archived_at__isnull=True,
                entity_identifier__in=IssueSubscriber.objects.filter(
                    workspace__slug=slug, subscriber_id=request.user.id
                ).values_list("issue_id", flat=True),
            ).count()

            # My Issues Count
            my_issues_count = Notification.objects.filter(
                workspace__slug=slug,
                receiver_id=request.user.id,
                read_at__isnull=True,
                archived_at__isnull=True,
                entity_identifier__in=IssueAssignee.objects.filter(
                    workspace__slug=slug, assignee_id=request.user.id
                ).values_list("issue_id", flat=True),
            ).count()

            # Created Issues Count
            created_issues_count = Notification.objects.filter(
                workspace__slug=slug,
                receiver_id=request.user.id,
                read_at__isnull=True,
                archived_at__isnull=True,
                entity_identifier__in=Issue.objects.filter(
                    workspace__slug=slug, created_by=request.user
                ).values_list("pk", flat=True),
            ).count()

            return Response(
                {
                    "watching_issues": watching_issues_count,
                    "my_issues": my_issues_count,
                    "created_issues": created_issues_count,
                },
                status=status.HTTP_200_OK,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )
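A sketch of how a client might combine the query parameters handled by NotificationViewSet.list above; the route path and cursor value are assumptions, not taken from this diff.

import requests

params = {
    "type": "assigned",   # all | watching | assigned | created
    "snoozed": "false",   # hide notifications snoozed into the future
    "archived": "false",
    "read": "false",      # only unread notifications
    # Supplying both per_page and cursor switches the view into
    # paginated mode via BasePaginator.
    "per_page": 30,
    "cursor": "<cursor>",
}
resp = requests.get(
    "https://plane.example.com/api/workspaces/<slug>/users/notifications/",
    params=params,
    headers={"Authorization": "Bearer <token>"},
)
print(resp.status_code)
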
@@ -96,6 +96,86 @@ class PageViewSet(BaseViewSet):
                status=status.HTTP_400_BAD_REQUEST,
            )

    def partial_update(self, request, slug, project_id, pk):
        try:
            page = Page.objects.get(pk=pk, workspace__slug=slug, project_id=project_id)
            # Only update access if the page owner is the requesting user
            if (
                page.access != request.data.get("access", page.access)
                and page.owned_by_id != request.user.id
            ):
                return Response(
                    {
                        "error": "Access cannot be updated since this page is owned by someone else"
                    },
                    status=status.HTTP_400_BAD_REQUEST,
                )
            serializer = PageSerializer(page, data=request.data, partial=True)
            if serializer.is_valid():
                serializer.save()
                return Response(serializer.data, status=status.HTTP_200_OK)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        except Page.DoesNotExist:
            return Response(
                {"error": "Page does not exist"}, status=status.HTTP_400_BAD_REQUEST
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

    def list(self, request, slug, project_id):
        try:
            queryset = self.get_queryset()
            page_view = request.GET.get("page_view", False)

            if not page_view:
                return Response({"error": "Page View parameter is required"}, status=status.HTTP_400_BAD_REQUEST)

            # All Pages
            if page_view == "all":
                return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)

            # Recent pages
            if page_view == "recent":
                current_time = date.today()
                day_before = current_time - timedelta(days=1)
                todays_pages = queryset.filter(updated_at__date=date.today())
                yesterdays_pages = queryset.filter(updated_at__date=day_before)
                earlier_this_week = queryset.filter(
                    updated_at__date__range=(
                        (timezone.now() - timedelta(days=7)),
                        (timezone.now() - timedelta(days=2)),
                    )
                )
                return Response(
                    {
                        "today": PageSerializer(todays_pages, many=True).data,
                        "yesterday": PageSerializer(yesterdays_pages, many=True).data,
                        "earlier_this_week": PageSerializer(earlier_this_week, many=True).data,
                    },
                    status=status.HTTP_200_OK,
                )

            # Favorite Pages
            if page_view == "favorite":
                queryset = queryset.filter(is_favorite=True)
                return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)

            # My pages
            if page_view == "created_by_me":
                queryset = queryset.filter(owned_by=request.user)
                return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)

            # Pages created by others
            if page_view == "created_by_other":
                queryset = queryset.filter(~Q(owned_by=request.user), access=0)
                return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)

            return Response({"error": "No matching view found"}, status=status.HTTP_400_BAD_REQUEST)
        except Exception as e:
            capture_exception(e)
            return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST)

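A sketch of the five collection modes the rewritten list view above multiplexes through the page_view query parameter; the route path is an assumption.

import requests

for view in ("all", "recent", "favorite", "created_by_me", "created_by_other"):
    resp = requests.get(
        "https://plane.example.com/api/workspaces/<slug>/projects/<project_id>/pages/",
        params={"page_view": view},
        headers={"Authorization": "Bearer <token>"},
    )
    # "recent" returns {"today": [...], "yesterday": [...], "earlier_this_week": [...]};
    # every other mode returns a flat list of serialized pages.
    print(view, resp.status_code)
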
class PageBlockViewSet(BaseViewSet):
    serializer_class = PageBlockSerializer

@@ -221,7 +301,7 @@ class CreateIssueFromPageBlockEndpoint(BaseAPIView):
                issue=issue,
                actor=request.user,
                project_id=project_id,
                comment=f"{request.user.email} created the issue from {page_block.name} block",
                comment=f"created the issue from {page_block.name} block",
                verb="created",
            )

@@ -239,249 +319,3 @@ class CreateIssueFromPageBlockEndpoint(BaseAPIView):
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )


class RecentPagesEndpoint(BaseAPIView):
    permission_classes = [
        ProjectEntityPermission,
    ]

    def get(self, request, slug, project_id):
        try:
            subquery = PageFavorite.objects.filter(
                user=request.user,
                page_id=OuterRef("pk"),
                project_id=project_id,
                workspace__slug=slug,
            )
            current_time = date.today()
            day_before = current_time - timedelta(days=1)

            todays_pages = (
                Page.objects.filter(
                    updated_at__date=date.today(),
                    workspace__slug=slug,
                    project_id=project_id,
                )
                .filter(project__project_projectmember__member=request.user)
                .annotate(is_favorite=Exists(subquery))
                .filter(Q(owned_by=self.request.user) | Q(access=0))
                .select_related("project")
                .select_related("workspace")
                .select_related("owned_by")
                .prefetch_related("labels")
                .prefetch_related(
                    Prefetch(
                        "blocks",
                        queryset=PageBlock.objects.select_related(
                            "page", "issue", "workspace", "project"
                        ),
                    )
                )
                .order_by("-is_favorite", "-updated_at")
            )

            yesterdays_pages = (
                Page.objects.filter(
                    updated_at__date=day_before,
                    workspace__slug=slug,
                    project_id=project_id,
                )
                .filter(project__project_projectmember__member=request.user)
                .annotate(is_favorite=Exists(subquery))
                .filter(Q(owned_by=self.request.user) | Q(access=0))
                .select_related("project")
                .select_related("workspace")
                .select_related("owned_by")
                .prefetch_related("labels")
                .prefetch_related(
                    Prefetch(
                        "blocks",
                        queryset=PageBlock.objects.select_related(
                            "page", "issue", "workspace", "project"
                        ),
                    )
                )
                .order_by("-is_favorite", "-updated_at")
            )

            earlier_this_week = (
                Page.objects.filter(
                    updated_at__date__range=(
                        (timezone.now() - timedelta(days=7)),
                        (timezone.now() - timedelta(days=2)),
                    ),
                    workspace__slug=slug,
                    project_id=project_id,
                )
                .annotate(is_favorite=Exists(subquery))
                .filter(Q(owned_by=self.request.user) | Q(access=0))
                .filter(project__project_projectmember__member=request.user)
                .select_related("project")
                .select_related("workspace")
                .select_related("owned_by")
                .prefetch_related("labels")
                .prefetch_related(
                    Prefetch(
                        "blocks",
                        queryset=PageBlock.objects.select_related(
                            "page", "issue", "workspace", "project"
                        ),
                    )
                )
                .order_by("-is_favorite", "-updated_at")
            )
            todays_pages_serializer = PageSerializer(todays_pages, many=True)
            yesterday_pages_serializer = PageSerializer(yesterdays_pages, many=True)
            earlier_this_week_serializer = PageSerializer(earlier_this_week, many=True)
            return Response(
                {
                    "today": todays_pages_serializer.data,
                    "yesterday": yesterday_pages_serializer.data,
                    "earlier_this_week": earlier_this_week_serializer.data,
                },
                status=status.HTTP_200_OK,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )


class FavoritePagesEndpoint(BaseAPIView):
    permission_classes = [
        ProjectEntityPermission,
    ]

    def get(self, request, slug, project_id):
        try:
            subquery = PageFavorite.objects.filter(
                user=request.user,
                page_id=OuterRef("pk"),
                project_id=project_id,
                workspace__slug=slug,
            )
            pages = (
                Page.objects.filter(
                    workspace__slug=slug,
                    project_id=project_id,
                )
                .annotate(is_favorite=Exists(subquery))
                .filter(Q(owned_by=self.request.user) | Q(access=0))
                .filter(project__project_projectmember__member=request.user)
                .filter(is_favorite=True)
                .select_related("project")
                .select_related("workspace")
                .select_related("owned_by")
                .prefetch_related("labels")
                .prefetch_related(
                    Prefetch(
                        "blocks",
                        queryset=PageBlock.objects.select_related(
                            "page", "issue", "workspace", "project"
                        ),
                    )
                )
                .order_by("name", "-is_favorite")
            )

            serializer = PageSerializer(pages, many=True)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )


class MyPagesEndpoint(BaseAPIView):
    permission_classes = [
        ProjectEntityPermission,
    ]

    def get(self, request, slug, project_id):
        try:
            subquery = PageFavorite.objects.filter(
                user=request.user,
                page_id=OuterRef("pk"),
                project_id=project_id,
                workspace__slug=slug,
            )
            pages = (
                Page.objects.filter(
                    workspace__slug=slug, project_id=project_id, owned_by=request.user
                )
                .select_related("project")
                .select_related("workspace")
                .select_related("owned_by")
                .prefetch_related("labels")
                .annotate(is_favorite=Exists(subquery))
                .filter(Q(owned_by=self.request.user) | Q(access=0))
                .filter(project__project_projectmember__member=request.user)
                .prefetch_related(
                    Prefetch(
                        "blocks",
                        queryset=PageBlock.objects.select_related(
                            "page", "issue", "workspace", "project"
                        ),
                    )
                )
                .order_by("-is_favorite", "name")
            )
            serializer = PageSerializer(pages, many=True)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )


class CreatedbyOtherPagesEndpoint(BaseAPIView):
    permission_classes = [
        ProjectEntityPermission,
    ]

    def get(self, request, slug, project_id):
        try:
            subquery = PageFavorite.objects.filter(
                user=request.user,
                page_id=OuterRef("pk"),
                project_id=project_id,
                workspace__slug=slug,
            )
            pages = (
                Page.objects.filter(
                    ~Q(owned_by=request.user),
                    workspace__slug=slug,
                    project_id=project_id,
                    access=0,
                )
                .select_related("project")
                .select_related("workspace")
                .select_related("owned_by")
                .prefetch_related("labels")
                .annotate(is_favorite=Exists(subquery))
                .prefetch_related(
                    Prefetch(
                        "blocks",
                        queryset=PageBlock.objects.select_related(
                            "page", "issue", "workspace", "project"
                        ),
                    )
                )
                .order_by("-is_favorite", "name")
            )
            serializer = PageSerializer(pages, many=True)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

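All four of the removed endpoints above lean on the same correlated-subquery idiom, which survives in the viewset-based list: annotate each row with a per-user boolean via Exists and OuterRef. A self-contained sketch of the idiom, reusing model names from this diff, with the user passed in as an assumption:

from django.db.models import Exists, OuterRef

from plane.db.models import Page, PageFavorite

def pages_with_favorite_flag(user, slug, project_id):
    """Annotate every page with whether `user` has favorited it."""
    favorite_subquery = PageFavorite.objects.filter(
        user=user,
        page_id=OuterRef("pk"),  # correlated with the outer Page row
        project_id=project_id,
        workspace__slug=slug,
    )
    return (
        Page.objects.filter(workspace__slug=slug, project_id=project_id)
        .annotate(is_favorite=Exists(favorite_subquery))
        .order_by("-is_favorite", "-updated_at")
    )
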
@@ -5,7 +5,21 @@ from datetime import datetime
# Django imports
from django.core.exceptions import ValidationError
from django.db import IntegrityError
from django.db.models import Q, Exists, OuterRef
from django.db.models import (
    Q,
    Exists,
    OuterRef,
    Func,
    F,
    Max,
    CharField,
    Subquery,
    Prefetch,
    When,
    Case,
    Value,
)
from django.core.validators import validate_email
from django.conf import settings

@@ -13,6 +27,7 @@ from django.conf import settings
from rest_framework.response import Response
from rest_framework import status
from rest_framework import serializers
from rest_framework.permissions import AllowAny
from sentry_sdk import capture_exception

# Module imports
@@ -23,9 +38,16 @@ from plane.api.serializers import (
    ProjectDetailSerializer,
    ProjectMemberInviteSerializer,
    ProjectFavoriteSerializer,
    IssueLiteSerializer,
    ProjectDeployBoardSerializer,
    ProjectMemberAdminSerializer,
)

from plane.api.permissions import ProjectBasePermission
from plane.api.permissions import (
    ProjectBasePermission,
    ProjectEntityPermission,
    ProjectMemberPermission,
)

from plane.db.models import (
    Project,
@@ -37,17 +59,28 @@ from plane.db.models import (
    State,
    TeamMember,
    ProjectFavorite,
    ProjectIdentifier,
    Module,
    Cycle,
    CycleFavorite,
    ModuleFavorite,
    PageFavorite,
    IssueViewFavorite,
    Page,
    IssueAssignee,
    ModuleMember,
    Inbox,
    ProjectDeployBoard,
    Issue,
    IssueReaction,
    IssueLink,
    IssueAttachment,
    Label,
)

from plane.db.models import (
    Project,
    ProjectMember,
    Workspace,
    ProjectMemberInvite,
    User,
    ProjectIdentifier,
)
from plane.bgtasks.project_invitation_task import project_invitation
from plane.utils.grouper import group_results
from plane.utils.issue_filters import issue_filters


class ProjectViewSet(BaseViewSet):
@@ -69,6 +102,7 @@ class ProjectViewSet(BaseViewSet):
            project_id=OuterRef("pk"),
            workspace__slug=self.kwargs.get("slug"),
        )

        return self.filter_queryset(
            super()
            .get_queryset()
@@ -78,21 +112,97 @@ class ProjectViewSet(BaseViewSet):
                "workspace", "workspace__owner", "default_assignee", "project_lead"
            )
            .annotate(is_favorite=Exists(subquery))
            .annotate(
                is_member=Exists(
                    ProjectMember.objects.filter(
                        member=self.request.user,
                        project_id=OuterRef("pk"),
                        workspace__slug=self.kwargs.get("slug"),
                    )
                )
            )
            .annotate(
                total_members=ProjectMember.objects.filter(
                    project_id=OuterRef("id"), member__is_bot=False
                )
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                total_cycles=Cycle.objects.filter(project_id=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                total_modules=Module.objects.filter(project_id=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                member_role=ProjectMember.objects.filter(
                    project_id=OuterRef("pk"),
                    member_id=self.request.user.id,
                ).values("role")
            )
            .annotate(
                is_deployed=Exists(
                    ProjectDeployBoard.objects.filter(
                        project_id=OuterRef("pk"),
                        workspace__slug=self.kwargs.get("slug"),
                    )
                )
            )
            .distinct()
        )

    def list(self, request, slug):
        try:
            is_favorite = request.GET.get("is_favorite", "all")
            subquery = ProjectFavorite.objects.filter(
                user=self.request.user,
                project_id=OuterRef("pk"),
                workspace__slug=self.kwargs.get("slug"),
            )
            sort_order_query = ProjectMember.objects.filter(
                member=request.user,
                project_id=OuterRef("pk"),
                workspace__slug=self.kwargs.get("slug"),
            ).values("sort_order")
            projects = (
                self.get_queryset()
                .annotate(is_favorite=Exists(subquery))
                .order_by("-is_favorite", "name")
                .annotate(sort_order=Subquery(sort_order_query))
                .order_by("sort_order", "name")
                .annotate(
                    total_members=ProjectMember.objects.filter(
                        project_id=OuterRef("id")
                    )
                    .order_by()
                    .annotate(count=Func(F("id"), function="Count"))
                    .values("count")
                )
                .annotate(
                    total_cycles=Cycle.objects.filter(project_id=OuterRef("id"))
                    .order_by()
                    .annotate(count=Func(F("id"), function="Count"))
                    .values("count")
                )
                .annotate(
                    total_modules=Module.objects.filter(project_id=OuterRef("id"))
                    .order_by()
                    .annotate(count=Func(F("id"), function="Count"))
                    .values("count")
                )
            )

            if is_favorite == "true":
                projects = projects.filter(is_favorite=True)
            if is_favorite == "false":
                projects = projects.filter(is_favorite=False)

            return Response(ProjectDetailSerializer(projects, many=True).data)
        except Exception as e:
            capture_exception(e)
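The total_members, total_cycles, and total_modules annotations above all repeat one correlated-COUNT idiom: filter on OuterRef, clear the default ordering with an empty order_by(), express COUNT through Func, and collapse the subquery to a single column with values(). Reduced to its skeleton, reusing names from this diff:

from django.db.models import F, Func, OuterRef

from plane.db.models import Cycle, Project

# Correlated COUNT(*) as a subquery: one row, one column per outer project.
cycle_count = (
    Cycle.objects.filter(project_id=OuterRef("id"))
    .order_by()                                    # drop default ordering
    .annotate(count=Func(F("id"), function="Count"))
    .values("count")                               # single-column subquery
)
projects = Project.objects.annotate(total_cycles=cycle_count)
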
@@ -111,41 +221,50 @@ class ProjectViewSet(BaseViewSet):
            if serializer.is_valid():
                serializer.save()

                ## Add the user as Administrator to the project
                ProjectMember.objects.create(
                # Add the user as Administrator to the project
                project_member = ProjectMember.objects.create(
                    project_id=serializer.data["id"], member=request.user, role=20
                )

                ## Default states
                if serializer.data["project_lead"] is not None and str(
                    serializer.data["project_lead"]
                ) != str(request.user.id):
                    ProjectMember.objects.create(
                        project_id=serializer.data["id"],
                        member_id=serializer.data["project_lead"],
                        role=20,
                    )

                # Default states
                states = [
                    {
                        "name": "Backlog",
                        "color": "#5e6ad2",
                        "color": "#A3A3A3",
                        "sequence": 15000,
                        "group": "backlog",
                        "default": True,
                    },
                    {
                        "name": "Todo",
                        "color": "#eb5757",
                        "color": "#3A3A3A",
                        "sequence": 25000,
                        "group": "unstarted",
                    },
                    {
                        "name": "In Progress",
                        "color": "#26b5ce",
                        "color": "#F59E0B",
                        "sequence": 35000,
                        "group": "started",
                    },
                    {
                        "name": "Done",
                        "color": "#f2c94c",
                        "color": "#16A34A",
                        "sequence": 45000,
                        "group": "completed",
                    },
                    {
                        "name": "Cancelled",
                        "color": "#4cb782",
                        "color": "#EF4444",
                        "sequence": 55000,
                        "group": "cancelled",
                    },
@@ -161,14 +280,17 @@ class ProjectViewSet(BaseViewSet):
                        workspace=serializer.instance.workspace,
                        group=state["group"],
                        default=state.get("default", False),
                        created_by=request.user,
                    )
                    for state in states
                ]
                )

                return Response(serializer.data, status=status.HTTP_201_CREATED)
                data = serializer.data
                data["sort_order"] = project_member.sort_order
                return Response(data, status=status.HTTP_201_CREATED)
            return Response(
                [serializer.errors[error][0] for error in serializer.errors],
                serializer.errors,
                status=status.HTTP_400_BAD_REQUEST,
            )
        except IntegrityError as e:
@@ -214,6 +336,20 @@ class ProjectViewSet(BaseViewSet):

            if serializer.is_valid():
                serializer.save()
                if serializer.data["inbox_view"]:
                    Inbox.objects.get_or_create(
                        name=f"{project.name} Inbox", project=project, is_default=True
                    )

                    # Create the triage state in Backlog group
                    State.objects.get_or_create(
                        name="Triage",
                        group="backlog",
                        description="Default state for managing all Inbox Issues",
                        project_id=pk,
                        color="#ff7700",
                    )

                return Response(serializer.data, status=status.HTTP_200_OK)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

@@ -259,7 +395,9 @@ class InviteProjectEndpoint(BaseAPIView):
            validate_email(email)
            # Check if user is already a member of workspace
            if ProjectMember.objects.filter(
                project_id=project_id, member__email=email
                project_id=project_id,
                member__email=email,
                member__is_bot=False,
            ).exists():
                return Response(
                    {"error": "User is already a member of the workspace"},

@@ -344,12 +482,13 @@ class UserProjectInvitationsViewset(BaseViewSet):
                    workspace=invitation.project.workspace,
                    member=request.user,
                    role=invitation.role,
                    created_by=request.user,
                )
                for invitation in project_invitations
            ]
        )

        ## Delete joined project invites
        # Delete joined project invites
        project_invitations.delete()

        return Response(status=status.HTTP_200_OK)
@@ -362,14 +501,14 @@


class ProjectMemberViewSet(BaseViewSet):
    serializer_class = ProjectMemberSerializer
    serializer_class = ProjectMemberAdminSerializer
    model = ProjectMember
    permission_classes = [
        ProjectBasePermission,
    ]

    search_fields = [
        "member__email",
        "member__display_name",
        "member__first_name",
    ]

@@ -385,6 +524,114 @@ class ProjectMemberViewSet(BaseViewSet):
            .select_related("workspace", "workspace__owner")
        )

    def partial_update(self, request, slug, project_id, pk):
        try:
            project_member = ProjectMember.objects.get(
                pk=pk, workspace__slug=slug, project_id=project_id
            )
            if request.user.id == project_member.member_id:
                return Response(
                    {"error": "You cannot update your own role"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            # Check while updating user roles
            requested_project_member = ProjectMember.objects.get(
                project_id=project_id, workspace__slug=slug, member=request.user
            )
            if (
                "role" in request.data
                and int(request.data.get("role", project_member.role))
                > requested_project_member.role
            ):
                return Response(
                    {
                        "error": "You cannot update a role that is higher than your own role"
                    },
                    status=status.HTTP_400_BAD_REQUEST,
                )

            serializer = ProjectMemberSerializer(
                project_member, data=request.data, partial=True
            )

            if serializer.is_valid():
                serializer.save()
                return Response(serializer.data, status=status.HTTP_200_OK)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        except ProjectMember.DoesNotExist:
            return Response(
                {"error": "Project Member does not exist"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

def destroy(self, request, slug, project_id, pk):
|
||||
try:
|
||||
project_member = ProjectMember.objects.get(
|
||||
workspace__slug=slug, project_id=project_id, pk=pk
|
||||
)
|
||||
# check requesting user role
|
||||
requesting_project_member = ProjectMember.objects.get(
|
||||
workspace__slug=slug, member=request.user, project_id=project_id
|
||||
)
|
||||
if requesting_project_member.role < project_member.role:
|
||||
return Response(
|
||||
{
|
||||
"error": "You cannot remove a user having role higher than yourself"
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Remove all favorites
|
||||
ProjectFavorite.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id, user=project_member.member
|
||||
).delete()
|
||||
CycleFavorite.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id, user=project_member.member
|
||||
).delete()
|
||||
ModuleFavorite.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id, user=project_member.member
|
||||
).delete()
|
||||
PageFavorite.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id, user=project_member.member
|
||||
).delete()
|
||||
IssueViewFavorite.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id, user=project_member.member
|
||||
).delete()
|
||||
# Also remove issue from issue assigned
|
||||
IssueAssignee.objects.filter(
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
assignee=project_member.member,
|
||||
).delete()
|
||||
|
||||
# Remove if module member
|
||||
ModuleMember.objects.filter(
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
member=project_member.member,
|
||||
).delete()
|
||||
# Delete owned Pages
|
||||
Page.objects.filter(
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
owned_by=project_member.member,
|
||||
).delete()
|
||||
project_member.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
except ProjectMember.DoesNotExist:
|
||||
return Response(
|
||||
{"error": "Project Member does not exist"}, status=status.HTTP_400
|
||||
)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response({"error": "Something went wrong please try again later"})
|
||||
|
||||
|
||||
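The guard in `partial_update` above compares the requested role against the requester's own role before saving, falling back to the target's current role when the payload omits `role`. That predicate is easy to get subtly wrong, so here is a minimal runnable sketch of it in isolation; the `ADMIN`/`MEMBER`/`VIEWER` names are illustrative stand-ins for plane's numeric roles, not identifiers from the codebase:

```python
# A minimal sketch of the role-hierarchy guard used in partial_update above.
ADMIN, MEMBER, VIEWER = 20, 15, 10  # illustrative names for plane's numeric roles


def can_update_role(requesting_role: int, target_current_role: int, payload: dict) -> bool:
    """Allow the update only if the requested role does not exceed the
    requester's own role; default to the target's current role when the
    payload omits "role", exactly as the view does."""
    requested_role = int(payload.get("role", target_current_role))
    return requested_role <= requesting_role


assert can_update_role(ADMIN, VIEWER, {"role": MEMBER})      # promote below own level
assert not can_update_role(MEMBER, VIEWER, {"role": ADMIN})  # cannot grant a higher role
assert can_update_role(MEMBER, VIEWER, {})                   # no role change requested
```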
class AddMemberToProjectEndpoint(BaseAPIView):
    permission_classes = [
@@ -393,45 +640,66 @@ class AddMemberToProjectEndpoint(BaseAPIView):

    def post(self, request, slug, project_id):
        try:
            member_id = request.data.get("member_id", False)
            role = request.data.get("role", False)
            members = request.data.get("members", [])

            if not member_id or not role:
            # get the project
            project = Project.objects.get(pk=project_id, workspace__slug=slug)

            if not len(members):
                return Response(
                    {"error": "Member ID and role is required"},
                    {"error": "At least one member is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            bulk_project_members = []

            # Check if the user is a member in the workspace
            if not WorkspaceMember.objects.filter(
                workspace__slug=slug, member_id=member_id
            ).exists():
                # TODO: Update this error message - nk
                return Response(
                    {
                        "error": "User is not a member of the workspace. Invite the user to the workspace to add them to the project"
                    },
                    status=status.HTTP_400_BAD_REQUEST,
            project_members = (
                ProjectMember.objects.filter(
                    workspace__slug=slug,
                    member_id__in=[member.get("member_id") for member in members],
                )

            # Check if the user is already member of project
            if ProjectMember.objects.filter(
                project=project_id, member_id=member_id
            ).exists():
                return Response(
                    {"error": "User is already a member of the project"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            # Add the user to project
            project_member = ProjectMember.objects.create(
                project_id=project_id, member_id=member_id, role=role
                .values("member_id", "sort_order")
                .order_by("sort_order")
            )

            serializer = ProjectMemberSerializer(project_member)
            for member in members:
                sort_order = [
                    project_member.get("sort_order")
                    for project_member in project_members
                    if str(project_member.get("member_id"))
                    == str(member.get("member_id"))
                ]
                bulk_project_members.append(
                    ProjectMember(
                        member_id=member.get("member_id"),
                        role=member.get("role", 10),
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                        sort_order=sort_order[0] - 10000 if len(sort_order) else 65535,
                    )
                )

            project_members = ProjectMember.objects.bulk_create(
                bulk_project_members,
                batch_size=10,
                ignore_conflicts=True,
            )

            serializer = ProjectMemberSerializer(project_members, many=True)

            return Response(serializer.data, status=status.HTTP_201_CREATED)

        except KeyError:
            return Response(
                {"error": "Incorrect data sent"}, status=status.HTTP_400_BAD_REQUEST
            )
        except Project.DoesNotExist:
            return Response(
                {"error": "Project does not exist"}, status=status.HTTP_400_BAD_REQUEST
            )
        except IntegrityError:
            return Response(
                {"error": "User is not a member of the workspace"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
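The bulk-add path above seeds each new membership's `sort_order` from the member's lowest existing `sort_order` in the workspace, minus 10000, falling back to the default 65535 when no prior membership exists. A runnable sketch of just that calculation, with plain dicts standing in for the `.values("member_id", "sort_order")` queryset rows (the helper name is illustrative):

```python
# Sketch of the sort_order seeding used when bulk-adding project members.
def seed_sort_order(member_id: str, existing: list) -> float:
    matches = [
        row["sort_order"]
        for row in existing
        if str(row["member_id"]) == str(member_id)
    ]
    # Place the new membership 10000 before the member's earliest existing
    # slot, or at the default 65535 when the member has no memberships yet.
    return matches[0] - 10000 if matches else 65535


rows = [{"member_id": "u1", "sort_order": 45535}]
assert seed_sort_order("u1", rows) == 35535
assert seed_sort_order("u2", rows) == 65535
```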
@@ -465,6 +733,7 @@ class AddTeamToProjectEndpoint(BaseAPIView):
                        project_id=project_id,
                        member_id=member,
                        workspace=workspace,
                        created_by=request.user,
                    )
                )

@@ -612,6 +881,7 @@ class ProjectJoinEndpoint(BaseAPIView):
                        if workspace_role >= 15
                        else (15 if workspace_role == 10 else workspace_role),
                        workspace=workspace,
                        created_by=request.user,
                    )
                    for project_id in project_ids
                ],
@@ -651,11 +921,15 @@ class ProjectUserViewsEndpoint(BaseAPIView):

            view_props = project_member.view_props
            default_props = project_member.default_props
            preferences = project_member.preferences
            sort_order = project_member.sort_order

            project_member.view_props = request.data.get("view_props", view_props)
            project_member.default_props = request.data.get(
                "default_props", default_props
            )
            project_member.preferences = request.data.get("preferences", preferences)
            project_member.sort_order = request.data.get("sort_order", sort_order)

            project_member.save()

@@ -760,3 +1034,255 @@ class ProjectFavoritesViewSet(BaseViewSet):
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

class ProjectDeployBoardViewSet(BaseViewSet):
    permission_classes = [
        ProjectMemberPermission,
    ]
    serializer_class = ProjectDeployBoardSerializer
    model = ProjectDeployBoard

    def get_queryset(self):
        return (
            super()
            .get_queryset()
            .filter(
                workspace__slug=self.kwargs.get("slug"),
                project_id=self.kwargs.get("project_id"),
            )
            .select_related("project")
        )

    def create(self, request, slug, project_id):
        try:
            comments = request.data.get("comments", False)
            reactions = request.data.get("reactions", False)
            inbox = request.data.get("inbox", None)
            votes = request.data.get("votes", False)
            views = request.data.get(
                "views",
                {
                    "list": True,
                    "kanban": True,
                    "calendar": True,
                    "gantt": True,
                    "spreadsheet": True,
                },
            )

            project_deploy_board, _ = ProjectDeployBoard.objects.get_or_create(
                anchor=f"{slug}/{project_id}",
                project_id=project_id,
            )
            project_deploy_board.comments = comments
            project_deploy_board.reactions = reactions
            project_deploy_board.inbox = inbox
            project_deploy_board.votes = votes
            project_deploy_board.views = views

            project_deploy_board.save()

            serializer = ProjectDeployBoardSerializer(project_deploy_board)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

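`create` above is effectively an upsert: `get_or_create` keyed on the anchor, followed by unconditional field assignment and a `save()`. The same shape can be written with Django's `update_or_create`; the following is a hedged alternative sketch assuming the same view locals, not the code this PR ships:

```python
# Sketch: the get_or_create + assign + save sequence above, condensed into a
# single update_or_create call. Assumes the view's local variables
# (slug, project_id, comments, reactions, inbox, votes, views) are in scope.
project_deploy_board, _created = ProjectDeployBoard.objects.update_or_create(
    anchor=f"{slug}/{project_id}",
    project_id=project_id,
    defaults={
        "comments": comments,
        "reactions": reactions,
        "inbox": inbox,
        "votes": votes,
        "views": views,
    },
)
```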
class ProjectMemberEndpoint(BaseAPIView):
    permission_classes = [
        ProjectEntityPermission,
    ]

    def get(self, request, slug, project_id):
        try:
            project_members = ProjectMember.objects.filter(
                project_id=project_id,
                workspace__slug=slug,
                member__is_bot=False,
            ).select_related("project", "member")
            serializer = ProjectMemberSerializer(project_members, many=True)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

class ProjectDeployBoardPublicSettingsEndpoint(BaseAPIView):
    permission_classes = [
        AllowAny,
    ]

    def get(self, request, slug, project_id):
        try:
            project_deploy_board = ProjectDeployBoard.objects.get(
                workspace__slug=slug, project_id=project_id
            )
            serializer = ProjectDeployBoardSerializer(project_deploy_board)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except ProjectDeployBoard.DoesNotExist:
            return Response(
                {"error": "Project Deploy Board does not exist"},
                status=status.HTTP_404_NOT_FOUND,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

class ProjectDeployBoardIssuesPublicEndpoint(BaseAPIView):
    permission_classes = [
        AllowAny,
    ]

    def get(self, request, slug, project_id):
        try:
            project_deploy_board = ProjectDeployBoard.objects.get(
                workspace__slug=slug, project_id=project_id
            )

            filters = issue_filters(request.query_params, "GET")

            # Custom ordering for priority and state
            priority_order = ["urgent", "high", "medium", "low", None]
            state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]

            order_by_param = request.GET.get("order_by", "-created_at")

            issue_queryset = (
                Issue.issue_objects.annotate(
                    sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
                    .order_by()
                    .annotate(count=Func(F("id"), function="Count"))
                    .values("count")
                )
                .filter(project_id=project_id)
                .filter(workspace__slug=slug)
                .select_related("project", "workspace", "state", "parent")
                .prefetch_related("assignees", "labels")
                .prefetch_related(
                    Prefetch(
                        "issue_reactions",
                        queryset=IssueReaction.objects.select_related("actor"),
                    )
                )
                .filter(**filters)
                .annotate(cycle_id=F("issue_cycle__cycle_id"))
                .annotate(module_id=F("issue_module__module_id"))
                .annotate(
                    link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                    .order_by()
                    .annotate(count=Func(F("id"), function="Count"))
                    .values("count")
                )
                .annotate(
                    attachment_count=IssueAttachment.objects.filter(
                        issue=OuterRef("id")
                    )
                    .order_by()
                    .annotate(count=Func(F("id"), function="Count"))
                    .values("count")
                )
            )

            # Priority Ordering
            if order_by_param == "priority" or order_by_param == "-priority":
                priority_order = (
                    priority_order
                    if order_by_param == "priority"
                    else priority_order[::-1]
                )
                issue_queryset = issue_queryset.annotate(
                    priority_order=Case(
                        *[
                            When(priority=p, then=Value(i))
                            for i, p in enumerate(priority_order)
                        ],
                        output_field=CharField(),
                    )
                ).order_by("priority_order")

            # State Ordering
            elif order_by_param in [
                "state__name",
                "state__group",
                "-state__name",
                "-state__group",
            ]:
                state_order = (
                    state_order
                    if order_by_param in ["state__name", "state__group"]
                    else state_order[::-1]
                )
                issue_queryset = issue_queryset.annotate(
                    state_order=Case(
                        *[
                            When(state__group=state_group, then=Value(i))
                            for i, state_group in enumerate(state_order)
                        ],
                        default=Value(len(state_order)),
                        output_field=CharField(),
                    )
                ).order_by("state_order")
            # assignee and label ordering
            elif order_by_param in [
                "labels__name",
                "-labels__name",
                "assignees__first_name",
                "-assignees__first_name",
            ]:
                issue_queryset = issue_queryset.annotate(
                    max_values=Max(
                        order_by_param[1::]
                        if order_by_param.startswith("-")
                        else order_by_param
                    )
                ).order_by(
                    "-max_values" if order_by_param.startswith("-") else "max_values"
                )
            else:
                issue_queryset = issue_queryset.order_by(order_by_param)

            issues = IssueLiteSerializer(issue_queryset, many=True).data

            states = State.objects.filter(
                workspace__slug=slug, project_id=project_id
            ).values("name", "group", "color", "id")

            labels = Label.objects.filter(
                workspace__slug=slug, project_id=project_id
            ).values("id", "name", "color", "parent")

            ## Grouping the results
            group_by = request.GET.get("group_by", False)
            if group_by:
                issues = group_results(issues, group_by)

            return Response(
                {
                    "issues": issues,
                    "states": states,
                    "labels": labels,
                },
                status=status.HTTP_200_OK,
            )
        except ProjectDeployBoard.DoesNotExist:
            return Response(
                {"error": "Board does not exist"}, status=status.HTTP_404_NOT_FOUND
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

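The priority and state ordering above maps each label to its index in a hand-ordered list via `Case`/`When` and then sorts on that annotation. A compact sketch of the same technique in isolation; `Ticket` and its `priority` field are illustrative stand-ins, so this only runs against a model of your own with a similar enumerated field:

```python
# Sketch: custom ordering by an explicit label ranking via Case/When.
from django.db.models import Case, IntegerField, Value, When

PRIORITY_RANK = ["urgent", "high", "medium", "low", None]

ranked = Ticket.objects.annotate(
    priority_rank=Case(
        # Each When maps one label to its position in the hand-ordered list.
        *[When(priority=p, then=Value(i)) for i, p in enumerate(PRIORITY_RANK)],
        default=Value(len(PRIORITY_RANK)),  # unknown labels sort last
        output_field=IntegerField(),
    )
).order_by("priority_rank")
```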
@@ -20,7 +20,7 @@ class GlobalSearchEndpoint(BaseAPIView):
    also show related workspace if found
    """

    def filter_workspaces(self, query, slug, project_id):
    def filter_workspaces(self, query, slug, project_id, workspace_search):
        fields = ["name"]
        q = Q()
        for field in fields:
@@ -31,8 +31,8 @@ class GlobalSearchEndpoint(BaseAPIView):
            .values("name", "id", "slug")
        )

    def filter_projects(self, query, slug, project_id):
        fields = ["name"]
    def filter_projects(self, query, slug, project_id, workspace_search):
        fields = ["name", "identifier"]
        q = Q()
        for field in fields:
            q |= Q(**{f"{field}__icontains": query})
@@ -46,8 +46,8 @@ class GlobalSearchEndpoint(BaseAPIView):
            .values("name", "id", "identifier", "workspace__slug")
        )

    def filter_issues(self, query, slug, project_id):
        fields = ["name", "sequence_id"]
    def filter_issues(self, query, slug, project_id, workspace_search):
        fields = ["name", "sequence_id", "project__identifier"]
        q = Q()
        for field in fields:
            if field == "sequence_id":
@@ -56,111 +56,123 @@ class GlobalSearchEndpoint(BaseAPIView):
                q |= Q(**{"sequence_id": sequence_id})
            else:
                q |= Q(**{f"{field}__icontains": query})
        return (
            Issue.objects.filter(
                q,
                project__project_projectmember__member=self.request.user,
                workspace__slug=slug,
                project_id=project_id,
            )
            .distinct()
            .values(
                "name",
                "id",
                "sequence_id",
                "project__identifier",
                "project_id",
                "workspace__slug",
            )

        issues = Issue.issue_objects.filter(
            q,
            project__project_projectmember__member=self.request.user,
            workspace__slug=slug,
        )

    def filter_cycles(self, query, slug, project_id):
        if workspace_search == "false" and project_id:
            issues = issues.filter(project_id=project_id)

        return issues.distinct().values(
            "name",
            "id",
            "sequence_id",
            "project__identifier",
            "project_id",
            "workspace__slug",
        )

    def filter_cycles(self, query, slug, project_id, workspace_search):
        fields = ["name"]
        q = Q()
        for field in fields:
            q |= Q(**{f"{field}__icontains": query})
        return (
            Cycle.objects.filter(
                q,
                project__project_projectmember__member=self.request.user,
                workspace__slug=slug,
                project_id=project_id,
            )
            .distinct()
            .values(
                "name",
                "id",
                "project_id",
                "workspace__slug",
            )

        cycles = Cycle.objects.filter(
            q,
            project__project_projectmember__member=self.request.user,
            workspace__slug=slug,
        )

    def filter_modules(self, query, slug, project_id):
        if workspace_search == "false" and project_id:
            cycles = cycles.filter(project_id=project_id)

        return cycles.distinct().values(
            "name",
            "id",
            "project_id",
            "project__identifier",
            "workspace__slug",
        )

    def filter_modules(self, query, slug, project_id, workspace_search):
        fields = ["name"]
        q = Q()
        for field in fields:
            q |= Q(**{f"{field}__icontains": query})
        return (
            Module.objects.filter(
                q,
                project__project_projectmember__member=self.request.user,
                workspace__slug=slug,
                project_id=project_id,
            )
            .distinct()
            .values(
                "name",
                "id",
                "project_id",
                "workspace__slug",
            )

        modules = Module.objects.filter(
            q,
            project__project_projectmember__member=self.request.user,
            workspace__slug=slug,
        )

    def filter_pages(self, query, slug, project_id):
        if workspace_search == "false" and project_id:
            modules = modules.filter(project_id=project_id)

        return modules.distinct().values(
            "name",
            "id",
            "project_id",
            "project__identifier",
            "workspace__slug",
        )

    def filter_pages(self, query, slug, project_id, workspace_search):
        fields = ["name"]
        q = Q()
        for field in fields:
            q |= Q(**{f"{field}__icontains": query})
        return (
            Page.objects.filter(
                q,
                project__project_projectmember__member=self.request.user,
                workspace__slug=slug,
                project_id=project_id,
            )
            .distinct()
            .values(
                "name",
                "id",
                "project_id",
                "workspace__slug",
            )

        pages = Page.objects.filter(
            q,
            project__project_projectmember__member=self.request.user,
            workspace__slug=slug,
        )

    def filter_views(self, query, slug, project_id):
        if workspace_search == "false" and project_id:
            pages = pages.filter(project_id=project_id)

        return pages.distinct().values(
            "name",
            "id",
            "project_id",
            "project__identifier",
            "workspace__slug",
        )

    def filter_views(self, query, slug, project_id, workspace_search):
        fields = ["name"]
        q = Q()
        for field in fields:
            q |= Q(**{f"{field}__icontains": query})
        return (
            IssueView.objects.filter(
                q,
                project__project_projectmember__member=self.request.user,
                workspace__slug=slug,
                project_id=project_id,
            )
            .distinct()
            .values(
                "name",
                "id",
                "project_id",
                "workspace__slug",
            )

        issue_views = IssueView.objects.filter(
            q,
            project__project_projectmember__member=self.request.user,
            workspace__slug=slug,
        )

    def get(self, request, slug, project_id):
        if workspace_search == "false" and project_id:
            issue_views = issue_views.filter(project_id=project_id)

        return issue_views.distinct().values(
            "name",
            "id",
            "project_id",
            "project__identifier",
            "workspace__slug",
        )

    def get(self, request, slug):
        try:
            query = request.query_params.get("search", False)
            workspace_search = request.query_params.get("workspace_search", "false")
            project_id = request.query_params.get("project_id", False)

            if not query:
                return Response(
                    {
@@ -191,7 +203,7 @@ class GlobalSearchEndpoint(BaseAPIView):

            for model in MODELS_MAPPER.keys():
                func = MODELS_MAPPER.get(model, None)
                results[model] = func(query, slug, project_id)
                results[model] = func(query, slug, project_id, workspace_search)
            return Response({"results": results}, status=status.HTTP_200_OK)

        except Exception as e:
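Every `filter_*` method now takes a `workspace_search` flag and applies the `project_id` restriction only when that flag is `"false"`. Because the flag arrives as a raw query-param string rather than a bool, the gating is stringly-typed; a tiny runnable sketch of the predicate (the helper name is illustrative):

```python
# Sketch of the workspace_search gating used by every filter_* method above.
# The flag is a raw query-param string ("true"/"false"), not a bool.
def should_scope_to_project(workspace_search: str, project_id) -> bool:
    return workspace_search == "false" and bool(project_id)


assert should_scope_to_project("false", "1234")       # project-scoped search
assert not should_scope_to_project("true", "1234")    # workspace-wide search
assert not should_scope_to_project("false", None)     # no project supplied
```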
@@ -206,37 +218,66 @@ class IssueSearchEndpoint(BaseAPIView):
    def get(self, request, slug, project_id):
        try:
            query = request.query_params.get("search", False)
            parent = request.query_params.get("parent", False)
            blocker_blocked_by = request.query_params.get("blocker_blocked_by", False)
            workspace_search = request.query_params.get("workspace_search", "false")
            parent = request.query_params.get("parent", "false")
            blocker_blocked_by = request.query_params.get("blocker_blocked_by", "false")
            cycle = request.query_params.get("cycle", "false")
            module = request.query_params.get("module", "false")
            sub_issue = request.query_params.get("sub_issue", "false")

            issue_id = request.query_params.get("issue_id", False)

            issues = search_issues(query)
            issues = issues.filter(
            issues = Issue.issue_objects.filter(
                workspace__slug=slug,
                project_id=project_id,
                project__project_projectmember__member=self.request.user,
            )

            if workspace_search == "false":
                issues = issues.filter(project_id=project_id)

            if query:
                issues = search_issues(query, issues)

            if parent == "true" and issue_id:
                issue = Issue.objects.get(pk=issue_id)
                issue = Issue.issue_objects.get(pk=issue_id)
                issues = issues.filter(
                    ~Q(pk=issue_id), ~Q(pk=issue.parent_id), parent__isnull=True
                ).exclude(
                    pk__in=Issue.objects.filter(parent__isnull=False).values_list(
                    pk__in=Issue.issue_objects.filter(parent__isnull=False).values_list(
                        "parent_id", flat=True
                    )
                )
            if blocker_blocked_by == "true" and issue_id:
                issues = issues.filter(blocker_issues=issue_id, blocked_issues=issue_id)
                issue = Issue.issue_objects.get(pk=issue_id)
                issues = issues.filter(
                    ~Q(pk=issue_id),
                    ~Q(blocked_issues__block=issue),
                    ~Q(blocker_issues__blocked_by=issue),
                )
            if sub_issue == "true" and issue_id:
                issue = Issue.issue_objects.get(pk=issue_id)
                issues = issues.filter(~Q(pk=issue_id), parent__isnull=True)
                if issue.parent:
                    issues = issues.filter(~Q(pk=issue.parent_id))

            if cycle == "true":
                issues = issues.exclude(issue_cycle__isnull=False)

            if module == "true":
                issues = issues.exclude(issue_module__isnull=False)

            return Response(
                issues.values(
                    "name",
                    "id",
                    "sequence_id",
                    "project__name",
                    "project__identifier",
                    "project_id",
                    "workspace__slug",
                    "state__name",
                    "state__group",
                    "state__color",
                ),
                status=status.HTTP_200_OK,
            )
@@ -245,7 +286,7 @@ class IssueSearchEndpoint(BaseAPIView):
                {"error": "Issue does not exist"}, status=status.HTTP_400_BAD_REQUEST
            )
        except Exception as e:
            capture_exception(e)
            print(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
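For `parent == "true"` the endpoint narrows the queryset to valid parent candidates: it excludes the issue itself, its current parent, and any issue that already acts as a parent, keeping only top-level issues. A condensed sketch of that exclusion chain; it assumes the same view context (an `issue_id` local and the `Issue.issue_objects` manager):

```python
# Sketch: selecting valid parent candidates for issue_id, mirroring the
# parent == "true" branch above. Runs inside the same view context.
from django.db.models import Q

issue = Issue.issue_objects.get(pk=issue_id)
candidates = Issue.issue_objects.filter(
    ~Q(pk=issue_id),          # an issue cannot be its own parent
    ~Q(pk=issue.parent_id),   # its current parent is not a fresh candidate
    parent__isnull=True,      # candidates must themselves be top-level
).exclude(
    # drop issues that already serve as a parent to some other issue
    pk__in=Issue.issue_objects.filter(parent__isnull=False).values_list(
        "parent_id", flat=True
    )
)
```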
@@ -1,29 +0,0 @@
# Module imports
from . import BaseViewSet
from plane.api.serializers import ShortCutSerializer
from plane.api.permissions import ProjectEntityPermission
from plane.db.models import Shortcut


class ShortCutViewSet(BaseViewSet):

    serializer_class = ShortCutSerializer
    model = Shortcut
    permission_classes = [
        ProjectEntityPermission,
    ]

    def perform_create(self, serializer):
        serializer.save(project_id=self.kwargs.get("project_id"))

    def get_queryset(self):
        return self.filter_queryset(
            super()
            .get_queryset()
            .filter(workspace__slug=self.kwargs.get("slug"))
            .filter(project_id=self.kwargs.get("project_id"))
            .filter(project__project_projectmember__member=self.request.user)
            .select_related("project")
            .select_related("workspace")
            .distinct()
        )
@@ -3,13 +3,13 @@ from itertools import groupby

# Django imports
from django.db import IntegrityError
from django.db.models import Q

# Third party imports
from rest_framework.response import Response
from rest_framework import status
from sentry_sdk import capture_exception


# Module imports
from . import BaseViewSet, BaseAPIView
from plane.api.serializers import StateSerializer
@@ -34,6 +34,7 @@ class StateViewSet(BaseViewSet):
            .filter(workspace__slug=self.kwargs.get("slug"))
            .filter(project_id=self.kwargs.get("project_id"))
            .filter(project__project_projectmember__member=self.request.user)
            .filter(~Q(name="Triage"))
            .select_related("project")
            .select_related("workspace")
            .distinct()
@@ -80,7 +81,8 @@ class StateViewSet(BaseViewSet):
    def destroy(self, request, slug, project_id, pk):
        try:
            state = State.objects.get(
                pk=pk, project_id=project_id, workspace__slug=slug
                ~Q(name="Triage"),
                pk=pk, project_id=project_id, workspace__slug=slug,
            )

            if state.default:
@@ -89,7 +91,7 @@ class StateViewSet(BaseViewSet):
                )

            # Check for any issues in the state
            issue_exist = Issue.objects.filter(state=pk).exists()
            issue_exist = Issue.issue_objects.filter(state=pk).exists()

            if issue_exist:
                return Response(
@@ -103,22 +105,3 @@ class StateViewSet(BaseViewSet):
            return Response(status=status.HTTP_204_NO_CONTENT)
        except State.DoesNotExist:
            return Response(
                {"error": "State does not exist"}, status=status.HTTP_404_NOT_FOUND
            )


class StateDeleteIssueCheckEndpoint(BaseAPIView):
    permission_classes = [
        ProjectEntityPermission,
    ]

    def get(self, request, slug, project_id, pk):
        try:
            issue_count = Issue.objects.filter(
                state=pk, workspace__slug=slug, project_id=project_id
            ).count()
            return Response({"issue_count": issue_count}, status=status.HTTP_200_OK)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )
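The "Triage" state introduced for the inbox is reserved: the same `~Q(name="Triage")` predicate hides it from the list queryset and shields it from deletion in `destroy`. One way to keep those two sites from drifting apart is to factor the predicate out; a hedged sketch (the constant and names are illustrative, the view inlines the predicate instead):

```python
# Sketch: one shared predicate for reserved states, usable by both the list
# queryset and the destroy() lookup above. Names are illustrative.
from django.db.models import Q

RESERVED_STATE_NAMES = ["Triage"]
NOT_RESERVED = ~Q(name__in=RESERVED_STATE_NAMES)

visible_states = State.objects.filter(NOT_RESERVED, project_id=project_id)
deletable_state = State.objects.get(NOT_RESERVED, pk=pk, project_id=project_id)
```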
@@ -31,36 +31,71 @@ class UserEndpoint(BaseViewSet):

    def retrieve(self, request):
        try:
            workspace = Workspace.objects.get(pk=request.user.last_workspace_id)
            workspace = Workspace.objects.get(
                pk=request.user.last_workspace_id, workspace_member__member=request.user
            )
            workspace_invites = WorkspaceMemberInvite.objects.filter(
                email=request.user.email
            ).count()
            assigned_issues = Issue.objects.filter(assignees__in=[request.user]).count()
            assigned_issues = Issue.issue_objects.filter(
                assignees__in=[request.user]
            ).count()

            serialized_data = UserSerializer(request.user).data
            serialized_data["workspace"] = {
                "last_workspace_id": request.user.last_workspace_id,
                "last_workspace_slug": workspace.slug,
                "fallback_workspace_id": request.user.last_workspace_id,
                "fallback_workspace_slug": workspace.slug,
                "invites": workspace_invites,
            }
            serialized_data.setdefault("issues", {})[
                "assigned_issues"
            ] = assigned_issues

            return Response(
                {
                    "user": UserSerializer(request.user).data,
                    "slug": workspace.slug,
                    "workspace_invites": workspace_invites,
                    "assigned_issues": assigned_issues,
                },
                serialized_data,
                status=status.HTTP_200_OK,
            )
        except Workspace.DoesNotExist:
            # This exception will be hit even when the `last_workspace_id` is None

            workspace_invites = WorkspaceMemberInvite.objects.filter(
                email=request.user.email
            ).count()
            assigned_issues = Issue.objects.filter(assignees__in=[request.user]).count()
            assigned_issues = Issue.issue_objects.filter(
                assignees__in=[request.user]
            ).count()

            fallback_workspace = (
                Workspace.objects.filter(workspace_member__member=request.user)
                .order_by("created_at")
                .first()
            )

            serialized_data = UserSerializer(request.user).data

            serialized_data["workspace"] = {
                "last_workspace_id": None,
                "last_workspace_slug": None,
                "fallback_workspace_id": fallback_workspace.id
                if fallback_workspace is not None
                else None,
                "fallback_workspace_slug": fallback_workspace.slug
                if fallback_workspace is not None
                else None,
                "invites": workspace_invites,
            }
            serialized_data.setdefault("issues", {})[
                "assigned_issues"
            ] = assigned_issues

            return Response(
                {
                    "user": UserSerializer(request.user).data,
                    "slug": None,
                    "workspace_invites": workspace_invites,
                    "assigned_issues": assigned_issues,
                },
                serialized_data,
                status=status.HTTP_200_OK,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
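When `last_workspace_id` no longer resolves (the `Workspace.DoesNotExist` branch above, also hit when the id is `None`), the payload falls back to the user's oldest workspace, or to `None` fields when the user has none. A runnable sketch of that fallback shape, with plain dicts standing in for the ORM rows:

```python
# Sketch of the fallback-workspace payload built in the DoesNotExist branch.
# `workspaces` mimics Workspace rows ordered by created_at ascending.
def workspace_payload(workspaces: list, invites: int) -> dict:
    fallback = workspaces[0] if workspaces else None
    return {
        "last_workspace_id": None,
        "last_workspace_slug": None,
        "fallback_workspace_id": fallback["id"] if fallback else None,
        "fallback_workspace_slug": fallback["slug"] if fallback else None,
        "invites": invites,
    }


assert workspace_payload([], 2)["fallback_workspace_id"] is None
assert workspace_payload([{"id": 1, "slug": "acme"}], 0)["fallback_workspace_slug"] == "acme"
```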
@@ -73,20 +108,23 @@ class UpdateUserOnBoardedEndpoint(BaseAPIView):
            user = User.objects.get(pk=request.user.id)
            user.is_onboarded = request.data.get("is_onboarded", False)
            user.save()
            return Response(
                {"message": "Updated successfully"}, status=status.HTTP_200_OK
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

            if user.last_workspace_id is not None:
                user_role = WorkspaceMember.objects.filter(
                    workspace_id=user.last_workspace_id, member=request.user.id
                ).first()
                return Response(
                    {
                        "message": "Updated successfully",
                        "role": user_role.company_role
                        if user_role is not None
                        else None,
                    },
                    status=status.HTTP_200_OK,
                )

class UpdateUserTourCompletedEndpoint(BaseAPIView):
    def patch(self, request):
        try:
            user = User.objects.get(pk=request.user.id)
            user.is_tour_completed = request.data.get("is_tour_completed", False)
            user.save()
            return Response(
                {"message": "Updated successfully"}, status=status.HTTP_200_OK
            )
@@ -102,7 +140,7 @@ class UserActivityEndpoint(BaseAPIView, BasePaginator):
    def get(self, request):
        try:
            queryset = IssueActivity.objects.filter(actor=request.user).select_related(
                "actor", "workspace"
                "actor", "workspace", "issue", "project"
            )

            return self.paginate(
@@ -18,11 +18,8 @@ from plane.api.permissions import ProjectEntityPermission
from plane.db.models import (
    IssueView,
    Issue,
    IssueBlocker,
    IssueLink,
    CycleIssue,
    ModuleIssue,
    IssueViewFavorite,
    IssueReaction,
)
from plane.utils.issue_filters import issue_filters

@@ -71,7 +68,7 @@ class ViewIssuesEndpoint(BaseAPIView):
        filters = issue_filters(request.query_params, "GET")

        issues = (
            Issue.objects.filter(
            Issue.issue_objects.filter(
                **queries, project_id=project_id, workspace__slug=slug
            )
            .filter(**filters)
@@ -81,6 +78,12 @@ class ViewIssuesEndpoint(BaseAPIView):
            .select_related("parent")
            .prefetch_related("assignees")
            .prefetch_related("labels")
            .prefetch_related(
                Prefetch(
                    "issue_reactions",
                    queryset=IssueReaction.objects.select_related("actor"),
                )
            )
        )

        serializer = IssueLiteSerializer(issues, many=True)

@@ -2,6 +2,7 @@
import jwt
from datetime import date, datetime
from dateutil.relativedelta import relativedelta
from uuid import uuid4

# Django imports
from django.db import IntegrityError
@@ -12,15 +13,22 @@ from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.contrib.sites.shortcuts import get_current_site
from django.db.models import (
    CharField,
    Count,
    Prefetch,
    OuterRef,
    Func,
    F,
    Q,
    Count,
    Case,
    Value,
    CharField,
    When,
    Max,
    IntegerField,
)
from django.db.models.functions import ExtractWeek, Cast, ExtractDay
from django.db.models.fields import DateField
from django.contrib.auth.hashers import make_password

# Third party modules
from rest_framework import status
@@ -37,6 +45,9 @@ from plane.api.serializers import (
    UserLiteSerializer,
    ProjectMemberSerializer,
    WorkspaceThemeSerializer,
    IssueActivitySerializer,
    IssueLiteSerializer,
    WorkspaceMemberAdminSerializer,
)
from plane.api.views.base import BaseAPIView
from . import BaseViewSet
@@ -50,9 +61,32 @@ from plane.db.models import (
    IssueActivity,
    Issue,
    WorkspaceTheme,
    IssueAssignee,
    ProjectFavorite,
    CycleFavorite,
    ModuleMember,
    ModuleFavorite,
    PageFavorite,
    Page,
    IssueViewFavorite,
    IssueLink,
    IssueAttachment,
    IssueSubscriber,
    Project,
    Label,
    WorkspaceMember,
    CycleIssue,
    IssueReaction,
)
from plane.api.permissions import (
    WorkSpaceBasePermission,
    WorkSpaceAdminPermission,
    WorkspaceEntityPermission,
    WorkspaceViewerPermission,
)
from plane.api.permissions import WorkSpaceBasePermission, WorkSpaceAdminPermission
from plane.bgtasks.workspace_invitation_task import workspace_invitation
from plane.utils.issue_filters import issue_filters
from plane.utils.grouper import group_results


class WorkSpaceViewSet(BaseViewSet):
@@ -72,14 +106,49 @@ class WorkSpaceViewSet(BaseViewSet):
    lookup_field = "slug"

    def get_queryset(self):
        return self.filter_queryset(
            super().get_queryset().select_related("owner")
        ).order_by("name")
        member_count = (
            WorkspaceMember.objects.filter(
                workspace=OuterRef("id"), member__is_bot=False
            )
            .order_by()
            .annotate(count=Func(F("id"), function="Count"))
            .values("count")
        )

        issue_count = (
            Issue.objects.filter(workspace=OuterRef("id"))
            .order_by()
            .annotate(count=Func(F("id"), function="Count"))
            .values("count")
        )
        return (
            self.filter_queryset(super().get_queryset().select_related("owner"))
            .order_by("name")
            .filter(workspace_member__member=self.request.user)
            .annotate(total_members=member_count)
            .annotate(total_issues=issue_count)
            .select_related("owner")
        )

    def create(self, request):
        try:
            serializer = WorkSpaceSerializer(data=request.data)

            slug = request.data.get("slug", False)
            name = request.data.get("name", False)

            if not name or not slug:
                return Response(
                    {"error": "Both name and slug are required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            if len(name) > 80 or len(slug) > 48:
                return Response(
                    {"error": "The maximum length for name is 80 and for slug is 48"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            if serializer.is_valid():
                serializer.save(owner=request.user)
                # Create Workspace member
@@ -125,21 +194,36 @@ class UserWorkSpacesEndpoint(BaseAPIView):
    def get(self, request):
        try:
            member_count = (
                WorkspaceMember.objects.filter(workspace=OuterRef("id"))
                WorkspaceMember.objects.filter(
                    workspace=OuterRef("id"), member__is_bot=False
                )
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )

            issue_count = (
                Issue.objects.filter(workspace=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )

            workspace = (
                Workspace.objects.prefetch_related(
                    Prefetch("workspace_member", queryset=WorkspaceMember.objects.all())
                (
                    Workspace.objects.prefetch_related(
                        Prefetch(
                            "workspace_member", queryset=WorkspaceMember.objects.all()
                        )
                    )
                    .filter(
                        workspace_member__member=request.user,
                    )
                    .select_related("owner")
                )
                .filter(
                    workspace_member__member=request.user,
                )
                .select_related("owner")
            ).annotate(total_members=member_count)
                .annotate(total_members=member_count)
                .annotate(total_issues=issue_count)
            )

            serializer = WorkSpaceSerializer(self.filter_queryset(workspace), many=True)
            return Response(serializer.data, status=status.HTTP_200_OK)
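Both querysets above compute per-workspace counts with the same correlated-subquery idiom: filter on `OuterRef("id")`, clear the default ordering, collapse to a single `Count` via `Func`, and read the value back with `.values("count")`. Restating the idiom with explanatory comments (it only runs inside the app's Django context, using the same models as above):

```python
# Sketch: annotating each Workspace with a correlated count subquery, the
# same idiom used for member_count and issue_count above.
from django.db.models import F, Func, OuterRef

member_count = (
    WorkspaceMember.objects.filter(workspace=OuterRef("id"), member__is_bot=False)
    .order_by()  # strip default ordering so the aggregate is a single row
    .annotate(count=Func(F("id"), function="Count"))
    .values("count")  # single-column subquery, usable inside annotate()
)

workspaces = Workspace.objects.annotate(total_members=member_count)
```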
@@ -187,6 +271,22 @@ class InviteWorkspaceEndpoint(BaseAPIView):
                    {"error": "Emails are required"}, status=status.HTTP_400_BAD_REQUEST
                )

            # check for role level
            requesting_user = WorkspaceMember.objects.get(
                workspace__slug=slug, member=request.user
            )
            if len(
                [
                    email
                    for email in emails
                    if int(email.get("role", 10)) > requesting_user.role
                ]
            ):
                return Response(
                    {"error": "You cannot invite a user with a higher role"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            workspace = Workspace.objects.get(slug=slug)

            # Check if user is already a member of workspace
@@ -223,6 +323,7 @@ class InviteWorkspaceEndpoint(BaseAPIView):
                            algorithm="HS256",
                        ),
                        role=email.get("role", 10),
                        created_by=request.user,
                    )
                )
            except ValidationError:
@@ -240,6 +341,21 @@ class InviteWorkspaceEndpoint(BaseAPIView):
                email__in=[email.get("email") for email in emails]
            ).select_related("workspace")

            # create the user if signup is disabled
            if settings.DOCKERIZED and not settings.ENABLE_SIGNUP:
                _ = User.objects.bulk_create(
                    [
                        User(
                            username=str(uuid4().hex),
                            email=invitation.email,
                            password=make_password(uuid4().hex),
                            is_password_autoset=True,
                        )
                        for invitation in workspace_invitations
                    ],
                    batch_size=100,
                )

            for invitation in workspace_invitations:
                workspace_invitation.delay(
                    invitation.email,
@@ -352,9 +468,33 @@ class WorkspaceInvitationsViewset(BaseViewSet):
            super()
            .get_queryset()
            .filter(workspace__slug=self.kwargs.get("slug"))
            .select_related("workspace", "workspace__owner")
            .select_related("workspace", "workspace__owner", "created_by")
        )

    def destroy(self, request, slug, pk):
        try:
            workspace_member_invite = WorkspaceMemberInvite.objects.get(
                pk=pk, workspace__slug=slug
            )
            # delete the user if signup is disabled
            if settings.DOCKERIZED and not settings.ENABLE_SIGNUP:
                user = User.objects.filter(email=workspace_member_invite.email).first()
                if user is not None:
                    user.delete()
            workspace_member_invite.delete()
            return Response(status=status.HTTP_204_NO_CONTENT)
        except WorkspaceMemberInvite.DoesNotExist:
            return Response(
                {"error": "Workspace member invite does not exist"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

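The invite flow above now rejects the whole batch if any requested role exceeds the inviter's own role (defaulting missing roles to 10). That batch check reduces to a small list comprehension; a runnable sketch with the helper name as an illustrative assumption:

```python
# Sketch of the batch role check in InviteWorkspaceEndpoint: collect the
# emails whose requested role is above the inviter's own role.
def invites_exceeding_role(emails: list, inviter_role: int) -> list:
    return [
        email["email"]
        for email in emails
        if int(email.get("role", 10)) > inviter_role  # 10 is the default role
    ]


batch = [{"email": "a@x.io", "role": 20}, {"email": "b@x.io"}]
assert invites_exceeding_role(batch, 15) == ["a@x.io"]
assert invites_exceeding_role(batch, 20) == []
```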
class UserWorkspaceInvitationsEndpoint(BaseViewSet):
    serializer_class = WorkSpaceMemberInviteSerializer
@@ -365,7 +505,8 @@ class UserWorkspaceInvitationsEndpoint(BaseViewSet):
            super()
            .get_queryset()
            .filter(email=self.request.user.email)
            .select_related("workspace", "workspace__owner")
            .select_related("workspace", "workspace__owner", "created_by")
            .annotate(total_members=Count("workspace__workspace_member"))
        )

    def create(self, request):
@@ -381,6 +522,7 @@ class UserWorkspaceInvitationsEndpoint(BaseViewSet):
                        workspace=invitation.workspace,
                        member=request.user,
                        role=invitation.role,
                        created_by=request.user,
                    )
                    for invitation in workspace_invitations
                ],
@@ -400,7 +542,7 @@ class UserWorkspaceInvitationsEndpoint(BaseViewSet):


class WorkSpaceMemberViewSet(BaseViewSet):
    serializer_class = WorkSpaceMemberSerializer
    serializer_class = WorkspaceMemberAdminSerializer
    model = WorkspaceMember

    permission_classes = [
@@ -408,7 +550,7 @@ class WorkSpaceMemberViewSet(BaseViewSet):
    ]

    search_fields = [
        "member__email",
        "member__display_name",
        "member__first_name",
    ]

@@ -421,6 +563,131 @@ class WorkSpaceMemberViewSet(BaseViewSet):
        .select_related("member")
    )

    def partial_update(self, request, slug, pk):
        try:
            workspace_member = WorkspaceMember.objects.get(pk=pk, workspace__slug=slug)
            if request.user.id == workspace_member.member_id:
                return Response(
                    {"error": "You cannot update your own role"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            # Get the requested user role
            requested_workspace_member = WorkspaceMember.objects.get(
                workspace__slug=slug, member=request.user
            )
            # Check if role is being updated
            # One cannot update a role higher than their own role
            if (
                "role" in request.data
                and int(request.data.get("role", workspace_member.role))
                > requested_workspace_member.role
            ):
                return Response(
                    {
                        "error": "You cannot update a role that is higher than your own role"
                    },
                    status=status.HTTP_400_BAD_REQUEST,
                )

            serializer = WorkSpaceMemberSerializer(
                workspace_member, data=request.data, partial=True
            )

            if serializer.is_valid():
                serializer.save()
                return Response(serializer.data, status=status.HTTP_200_OK)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        except WorkspaceMember.DoesNotExist:
            return Response(
                {"error": "Workspace Member does not exist"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

    def destroy(self, request, slug, pk):
        try:
            # Check the user role who is deleting the user
            workspace_member = WorkspaceMember.objects.get(workspace__slug=slug, pk=pk)

            # check requesting user role
            requesting_workspace_member = WorkspaceMember.objects.get(
                workspace__slug=slug, member=request.user
            )
            if requesting_workspace_member.role < workspace_member.role:
                return Response(
                    {"error": "You cannot remove a user having a role higher than you"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            # Check for the only member in the workspace
            if (
                workspace_member.role == 20
                and WorkspaceMember.objects.filter(
                    workspace__slug=slug,
                    role=20,
                    member__is_bot=False,
                ).count()
                == 1
            ):
                return Response(
                    {"error": "Cannot delete the only Admin for the workspace"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            # Delete the user also from all the projects
            ProjectMember.objects.filter(
                workspace__slug=slug, member=workspace_member.member
            ).delete()
            # Remove all favorites
            ProjectFavorite.objects.filter(
                workspace__slug=slug, user=workspace_member.member
            ).delete()
            CycleFavorite.objects.filter(
                workspace__slug=slug, user=workspace_member.member
            ).delete()
            ModuleFavorite.objects.filter(
                workspace__slug=slug, user=workspace_member.member
            ).delete()
            PageFavorite.objects.filter(
                workspace__slug=slug, user=workspace_member.member
            ).delete()
            IssueViewFavorite.objects.filter(
                workspace__slug=slug, user=workspace_member.member
            ).delete()
            # Also remove the member's issue assignments
            IssueAssignee.objects.filter(
                workspace__slug=slug, assignee=workspace_member.member
            ).delete()

            # Remove if module member
            ModuleMember.objects.filter(
                workspace__slug=slug, member=workspace_member.member
            ).delete()
            # Delete owned Pages
            Page.objects.filter(
                workspace__slug=slug, owned_by=workspace_member.member
            ).delete()

            workspace_member.delete()
            return Response(status=status.HTTP_204_NO_CONTENT)
        except WorkspaceMember.DoesNotExist:
            return Response(
                {"error": "Workspace Member does not exist"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

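`destroy` above refuses to remove a member when they are the workspace's only remaining admin (role 20, bots excluded). The guard reduces to a small predicate; a runnable sketch with the human-admin count passed in rather than queried:

```python
# Sketch of the last-admin guard in WorkSpaceMemberViewSet.destroy: block
# deletion when the target is an admin and no other human admin remains.
ADMIN_ROLE = 20  # plane's numeric admin role


def is_last_admin(target_role: int, human_admin_count: int) -> bool:
    return target_role == ADMIN_ROLE and human_admin_count == 1


assert is_last_admin(20, 1)        # sole admin: deletion must be blocked
assert not is_last_admin(20, 3)    # other admins exist
assert not is_last_admin(15, 1)    # target is not an admin
```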
class TeamMemberViewSet(BaseViewSet):
    serializer_class = TeamSerializer
@@ -430,7 +697,7 @@ class TeamMemberViewSet(BaseViewSet):
    ]

    search_fields = [
        "member__email",
        "member__display_name",
        "member__first_name",
    ]

@@ -623,7 +890,7 @@ class UserIssueCompletedGraphEndpoint(BaseAPIView):
            month = request.GET.get("month", 1)

            issues = (
                Issue.objects.filter(
                Issue.issue_objects.filter(
                    assignees__in=[request.user],
                    workspace__slug=slug,
                    completed_at__month=month,
@@ -668,7 +935,7 @@ class UserWorkspaceDashboardEndpoint(BaseAPIView):
            month = request.GET.get("month", 1)

            completed_issues = (
                Issue.objects.filter(
                Issue.issue_objects.filter(
                    assignees__in=[request.user],
                    workspace__slug=slug,
                    completed_at__month=month,
@@ -681,24 +948,24 @@ class UserWorkspaceDashboardEndpoint(BaseAPIView):
                .order_by("week_in_month")
            )

            assigned_issues = Issue.objects.filter(
            assigned_issues = Issue.issue_objects.filter(
                workspace__slug=slug, assignees__in=[request.user]
            ).count()

            pending_issues_count = Issue.objects.filter(
            pending_issues_count = Issue.issue_objects.filter(
                ~Q(state__group__in=["completed", "cancelled"]),
                workspace__slug=slug,
                assignees__in=[request.user],
            ).count()

            completed_issues_count = Issue.objects.filter(
            completed_issues_count = Issue.issue_objects.filter(
                workspace__slug=slug,
                assignees__in=[request.user],
                state__group="completed",
            ).count()

            issues_due_week = (
                Issue.objects.filter(
                Issue.issue_objects.filter(
                    workspace__slug=slug,
                    assignees__in=[request.user],
                )
@@ -708,14 +975,16 @@ class UserWorkspaceDashboardEndpoint(BaseAPIView):
            )

            state_distribution = (
                Issue.objects.filter(workspace__slug=slug, assignees__in=[request.user])
                Issue.issue_objects.filter(
                    workspace__slug=slug, assignees__in=[request.user]
                )
                .annotate(state_group=F("state__group"))
                .values("state_group")
                .annotate(state_count=Count("state_group"))
                .order_by("state_group")
            )

            overdue_issues = Issue.objects.filter(
            overdue_issues = Issue.issue_objects.filter(
                ~Q(state__group__in=["completed", "cancelled"]),
                workspace__slug=slug,
                assignees__in=[request.user],
@@ -723,13 +992,13 @@ class UserWorkspaceDashboardEndpoint(BaseAPIView):
                completed_at__isnull=True,
            ).values("id", "name", "workspace__slug", "project_id", "target_date")

            upcoming_issues = Issue.objects.filter(
            upcoming_issues = Issue.issue_objects.filter(
                ~Q(state__group__in=["completed", "cancelled"]),
                target_date__gte=timezone.now(),
                start_date__gte=timezone.now(),
                workspace__slug=slug,
                assignees__in=[request.user],
                completed_at__isnull=True,
            ).values("id", "name", "workspace__slug", "project_id", "target_date")
            ).values("id", "name", "workspace__slug", "project_id", "start_date")

            return Response(
                {
@@ -784,3 +1053,422 @@ class WorkspaceThemeViewSet(BaseViewSet):
                    status=status.HTTP_400_BAD_REQUEST,
                )

class WorkspaceUserProfileStatsEndpoint(BaseAPIView):
    def get(self, request, slug, user_id):
        try:
            filters = issue_filters(request.query_params, "GET")

            state_distribution = (
                Issue.issue_objects.filter(
                    workspace__slug=slug,
                    assignees__in=[user_id],
                    project__project_projectmember__member=request.user,
                )
                .filter(**filters)
                .annotate(state_group=F("state__group"))
                .values("state_group")
                .annotate(state_count=Count("state_group"))
                .order_by("state_group")
            )

            priority_order = ["urgent", "high", "medium", "low", None]

            priority_distribution = (
                Issue.objects.filter(
                    workspace__slug=slug,
                    assignees__in=[user_id],
                    project__project_projectmember__member=request.user,
                )
                .filter(**filters)
                .values("priority")
                .annotate(priority_count=Count("priority"))
                .annotate(
                    priority_order=Case(
                        *[
                            When(priority=p, then=Value(i))
                            for i, p in enumerate(priority_order)
                        ],
                        default=Value(len(priority_order)),
                        output_field=IntegerField(),
                    )
                )
                .order_by("priority_order")
            )

            created_issues = (
                Issue.issue_objects.filter(
                    workspace__slug=slug,
                    assignees__in=[user_id],
                    project__project_projectmember__member=request.user,
                    created_by_id=user_id,
                )
                .filter(**filters)
                .count()
            )

            assigned_issues_count = (
                Issue.issue_objects.filter(
                    workspace__slug=slug,
                    assignees__in=[user_id],
                    project__project_projectmember__member=request.user,
                )
                .filter(**filters)
                .count()
            )

            pending_issues_count = (
                Issue.issue_objects.filter(
                    ~Q(state__group__in=["completed", "cancelled"]),
                    workspace__slug=slug,
                    assignees__in=[user_id],
                    project__project_projectmember__member=request.user,
                )
                .filter(**filters)
                .count()
            )

            completed_issues_count = (
                Issue.issue_objects.filter(
                    workspace__slug=slug,
                    assignees__in=[user_id],
                    state__group="completed",
                    project__project_projectmember__member=request.user,
                )
                .filter(**filters)
                .count()
            )

            subscribed_issues_count = (
                IssueSubscriber.objects.filter(
                    workspace__slug=slug,
                    subscriber_id=user_id,
                    project__project_projectmember__member=request.user,
                )
                .filter(**filters)
                .count()
            )

            upcoming_cycles = CycleIssue.objects.filter(
                workspace__slug=slug,
                cycle__start_date__gt=timezone.now().date(),
                issue__assignees__in=[
                    user_id,
                ],
            ).values("cycle__name", "cycle__id", "cycle__project_id")

            present_cycle = CycleIssue.objects.filter(
                workspace__slug=slug,
                cycle__start_date__lt=timezone.now().date(),
                cycle__end_date__gt=timezone.now().date(),
                issue__assignees__in=[
                    user_id,
                ],
            ).values("cycle__name", "cycle__id", "cycle__project_id")

            return Response(
                {
                    "state_distribution": state_distribution,
                    "priority_distribution": priority_distribution,
                    "created_issues": created_issues,
                    "assigned_issues": assigned_issues_count,
                    "completed_issues": completed_issues_count,
                    "pending_issues": pending_issues_count,
                    "subscribed_issues": subscribed_issues_count,
                    "present_cycles": present_cycle,
                    "upcoming_cycles": upcoming_cycles,
                }
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

class WorkspaceUserActivityEndpoint(BaseAPIView):
    permission_classes = [
        WorkspaceEntityPermission,
    ]

    def get(self, request, slug, user_id):
        try:
            projects = request.query_params.getlist("project", [])

            queryset = IssueActivity.objects.filter(
                workspace__slug=slug,
                project__project_projectmember__member=request.user,
                actor=user_id,
            ).select_related("actor", "workspace", "issue", "project")

            if projects:
                queryset = queryset.filter(project__in=projects)

            return self.paginate(
                request=request,
                queryset=queryset,
                on_results=lambda issue_activities: IssueActivitySerializer(
                    issue_activities, many=True
                ).data,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

class WorkspaceUserProfileEndpoint(BaseAPIView):
    def get(self, request, slug, user_id):
        try:
            user_data = User.objects.get(pk=user_id)

            requesting_workspace_member = WorkspaceMember.objects.get(
                workspace__slug=slug, member=request.user
            )
            projects = []
            if requesting_workspace_member.role >= 10:
                projects = (
                    Project.objects.filter(
                        workspace__slug=slug,
                        project_projectmember__member=request.user,
                    )
                    .annotate(
                        created_issues=Count(
                            "project_issue",
                            filter=Q(project_issue__created_by_id=user_id),
                        )
                    )
                    .annotate(
                        assigned_issues=Count(
                            "project_issue",
                            filter=Q(project_issue__assignees__in=[user_id]),
                        )
                    )
                    .annotate(
                        completed_issues=Count(
                            "project_issue",
                            filter=Q(
                                project_issue__completed_at__isnull=False,
                                project_issue__assignees__in=[user_id],
                            ),
                        )
                    )
                    .annotate(
                        pending_issues=Count(
                            "project_issue",
                            filter=Q(
                                project_issue__state__group__in=[
                                    "backlog",
                                    "unstarted",
                                    "started",
                                ],
                                project_issue__assignees__in=[user_id],
                            ),
                        )
                    )
                    .values(
                        "id",
                        "name",
                        "identifier",
                        "emoji",
                        "icon_prop",
                        "created_issues",
                        "assigned_issues",
                        "completed_issues",
                        "pending_issues",
                    )
                )

            return Response(
                {
                    "project_data": projects,
                    "user_data": {
                        "email": user_data.email,
                        "first_name": user_data.first_name,
                        "last_name": user_data.last_name,
                        "avatar": user_data.avatar,
                        "cover_image": user_data.cover_image,
                        "date_joined": user_data.date_joined,
                        "user_timezone": user_data.user_timezone,
                        "display_name": user_data.display_name,
                    },
                },
                status=status.HTTP_200_OK,
            )
        except WorkspaceMember.DoesNotExist:
            return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong, please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
    permission_classes = [
        WorkspaceViewerPermission,
    ]

    def get(self, request, slug, user_id):
        try:
            filters = issue_filters(request.query_params, "GET")
            order_by_param = request.GET.get("order_by", "-created_at")
            issue_queryset = (
                Issue.issue_objects.filter(
                    Q(assignees__in=[user_id])
                    | Q(created_by_id=user_id)
                    | Q(issue_subscribers__subscriber_id=user_id),
                    workspace__slug=slug,
                    project__project_projectmember__member=request.user,
                )
                .filter(**filters)
                .annotate(
                    sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
                    .order_by()
                    .annotate(count=Func(F("id"), function="Count"))
                    .values("count")
                )
                .select_related("project", "workspace", "state", "parent")
                .prefetch_related("assignees", "labels")
                .prefetch_related(
                    Prefetch(
                        "issue_reactions",
                        queryset=IssueReaction.objects.select_related("actor"),
                    )
                )
                .order_by("-created_at")
                .annotate(
                    link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                    .order_by()
                    .annotate(count=Func(F("id"), function="Count"))
                    .values("count")
                )
                .annotate(
                    attachment_count=IssueAttachment.objects.filter(
                        issue=OuterRef("id")
                    )
                    .order_by()
                    .annotate(count=Func(F("id"), function="Count"))
                    .values("count")
                )
            ).distinct()

            # Priority Ordering
            if order_by_param == "priority" or order_by_param == "-priority":
                priority_order = (
                    priority_order
                    if order_by_param == "priority"
                    else priority_order[::-1]
                )
                issue_queryset = issue_queryset.annotate(
                    priority_order=Case(
                        *[
                            When(priority=p, then=Value(i))
                            for i, p in enumerate(priority_order)
                        ],
                        output_field=CharField(),
                    )
                ).order_by("priority_order")

            # State Ordering
            elif order_by_param in [
                "state__name",
                "state__group",
                "-state__name",
                "-state__group",
            ]:
                state_order = (
                    state_order
                    if order_by_param in ["state__name", "state__group"]
                    else state_order[::-1]
                )
                issue_queryset = issue_queryset.annotate(
                    state_order=Case(
                        *[
                            When(state__group=state_group, then=Value(i))
                            for i, state_group in enumerate(state_order)
                        ],
                        default=Value(len(state_order)),
                        output_field=CharField(),
                    )
                ).order_by("state_order")
            # assignee and label ordering
            elif order_by_param in [
                "labels__name",
                "-labels__name",
                "assignees__first_name",
                "-assignees__first_name",
            ]:
                issue_queryset = issue_queryset.annotate(
                    max_values=Max(
                        order_by_param[1::]
                        if order_by_param.startswith("-")
                        else order_by_param
                    )
                ).order_by(
                    "-max_values" if order_by_param.startswith("-") else "max_values"
                )
            else:
                issue_queryset = issue_queryset.order_by(order_by_param)

            issues = IssueLiteSerializer(issue_queryset, many=True).data

            ## Grouping the results
            group_by = request.GET.get("group_by", False)
            if group_by:
                return Response(
                    group_results(issues, group_by), status=status.HTTP_200_OK
                )

            return Response(issues, status=status.HTTP_200_OK)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong, please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

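For orientation, a minimal client-side sketch of how the ordering and grouping branches above might be exercised. The host and URL pattern are assumptions based on the view name; the actual route is registered in a urls module that is not part of this diff:

import requests  # hypothetical client sketch, not part of this change

# Assumed route for WorkspaceUserProfileIssuesEndpoint (urls module not shown)
url = "https://plane.example.com/api/workspaces/my-workspace/user-profile/<user_id>/issues/"
params = {
    "order_by": "-priority",     # exercises the priority-ordering branch
    "group_by": "state__group",  # response is grouped via group_results(...)
}
resp = requests.get(url, params=params, headers={"Authorization": "Bearer <token>"})
grouped_issues = resp.json()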
class WorkspaceLabelsEndpoint(BaseAPIView):
    permission_classes = [
        WorkspaceViewerPermission,
    ]

    def get(self, request, slug):
        try:
            labels = Label.objects.filter(
                workspace__slug=slug,
                project__project_projectmember__member=request.user,
            ).values("parent", "name", "color", "id", "project_id", "workspace__slug")
            return Response(labels, status=status.HTTP_200_OK)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong, please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

class WorkspaceMembersEndpoint(BaseAPIView):
    permission_classes = [
        WorkspaceEntityPermission,
    ]

    def get(self, request, slug):
        try:
            workspace_members = WorkspaceMember.objects.filter(
                workspace__slug=slug,
                member__is_bot=False,
            ).select_related("workspace", "member")
            serializer = WorkSpaceMemberSerializer(workspace_members, many=True)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong, please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

176
apiserver/plane/bgtasks/analytic_plot_export.py
Normal file
@@ -0,0 +1,176 @@
# Python imports
import csv
import io

# Django imports
from django.core.mail import EmailMultiAlternatives
from django.template.loader import render_to_string
from django.utils.html import strip_tags
from django.conf import settings

# Third party imports
from celery import shared_task
from sentry_sdk import capture_exception

# Module imports
from plane.db.models import Issue
from plane.utils.analytics_plot import build_graph_plot
from plane.utils.issue_filters import issue_filters

row_mapping = {
    "state__name": "State",
    "state__group": "State Group",
    "labels__name": "Label",
    "assignees__display_name": "Assignee Name",
    "start_date": "Start Date",
    "target_date": "Due Date",
    "completed_at": "Completed At",
    "created_at": "Created At",
    "issue_count": "Issue Count",
    "priority": "Priority",
    "estimate": "Estimate",
}


@shared_task
def analytic_export_task(email, data, slug):
    try:
        filters = issue_filters(data, "POST")
        queryset = Issue.issue_objects.filter(**filters, workspace__slug=slug)

        x_axis = data.get("x_axis", False)
        y_axis = data.get("y_axis", False)
        segment = data.get("segment", False)

        distribution = build_graph_plot(
            queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
        )

        key = "count" if y_axis == "issue_count" else "estimate"

        # Preserve the original segment value; the loop below reuses the name "segment"
        segmented = segment

        assignee_details = {}
        if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
            assignee_details = (
                Issue.issue_objects.filter(workspace__slug=slug, **filters, assignees__avatar__isnull=False)
                .order_by("assignees__id")
                .distinct("assignees__id")
                .values("assignees__avatar", "assignees__display_name", "assignees__first_name", "assignees__last_name", "assignees__id")
            )

        if segment:
            segment_zero = []
            for item in distribution:
                current_dict = distribution.get(item)
                for current in current_dict:
                    segment_zero.append(current.get("segment"))

            segment_zero = list(set(segment_zero))
            row_zero = (
                [
                    row_mapping.get(x_axis, "X-Axis"),
                ]
                + [
                    row_mapping.get(y_axis, "Y-Axis"),
                ]
                + segment_zero
            )
            rows = []
            for item in distribution:
                generated_row = [
                    item,
                ]

                data = distribution.get(item)
                # Add y axis values
                generated_row.append(sum(obj.get(key) for obj in data if obj.get(key, None) is not None))

                for segment in segment_zero:
                    value = [x for x in data if x.get("segment") == segment]
                    if len(value):
                        generated_row.append(value[0].get(key))
                    else:
                        generated_row.append("0")
                # x-axis replacement for names
                if x_axis in ["assignees__id"]:
                    assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(item)]
                    if len(assignee):
                        generated_row[0] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name"))
                rows.append(tuple(generated_row))

            # If segment is ["assignees__id"] then replace the segment_zero headers with first and last names
            if segmented in ["assignees__id"]:
                for index, segm in enumerate(row_zero[2:]):
                    # find the name of the user
                    assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(segm)]
                    if len(assignee):
                        row_zero[index + 2] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name"))

            rows = [tuple(row_zero)] + rows
            csv_buffer = io.StringIO()
            writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)

            # Write CSV data to the buffer
            for row in rows:
                writer.writerow(row)

            subject = "Your Export is ready"

            html_content = render_to_string("emails/exports/analytics.html", {})

            text_content = strip_tags(html_content)
            csv_buffer.seek(0)
            msg = EmailMultiAlternatives(
                subject, text_content, settings.EMAIL_FROM, [email]
            )
            msg.attach(f"{slug}-analytics.csv", csv_buffer.read())
            msg.send(fail_silently=False)

        else:
            row_zero = [
                row_mapping.get(x_axis, "X-Axis"),
                row_mapping.get(y_axis, "Y-Axis"),
            ]
            rows = []
            for item in distribution:
                row = [
                    item,
                    distribution.get(item)[0].get("count")
                    if y_axis == "issue_count"
                    else distribution.get(item)[0].get("estimate"),
                ]
                # x-axis replacement to names
                if x_axis in ["assignees__id"]:
                    assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(item)]
                    if len(assignee):
                        row[0] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name"))

                rows.append(tuple(row))
            rows = [tuple(row_zero)] + rows
            csv_buffer = io.StringIO()
            writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)

            # Write CSV data to the buffer
            for row in rows:
                writer.writerow(row)

            subject = "Your Export is ready"

            html_content = render_to_string("emails/exports/analytics.html", {})

            text_content = strip_tags(html_content)

            csv_buffer.seek(0)
            msg = EmailMultiAlternatives(
                subject, text_content, settings.EMAIL_FROM, [email]
            )
            msg.attach(f"{slug}-analytics.csv", csv_buffer.read())
            msg.send(fail_silently=False)

    except Exception as e:
        # Print logs if in DEBUG mode
        if settings.DEBUG:
            print(e)
        capture_exception(e)
        return
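A minimal sketch of how analytic_export_task might be enqueued. The calling view is not part of this diff, so the payload below only assumes the keys the task body itself reads (x_axis, y_axis, segment, plus issue_filters parameters):

# Hypothetical caller of the task defined above.
analytic_export_task.delay(
    email="member@example.com",
    data={
        "x_axis": "state__name",
        "y_axis": "issue_count",     # selects the "count" key inside the task
        "segment": "assignees__id",  # triggers the segmented CSV branch
    },
    slug="my-workspace",
)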
@@ -19,7 +19,7 @@ def email_verification(first_name, email, token, current_site):

    try:
        realtivelink = "/request-email-verification/" + "?token=" + str(token)
-       abs_url = "http://" + current_site + realtivelink
+       abs_url = current_site + realtivelink

        from_email_string = settings.EMAIL_FROM

@@ -39,5 +39,8 @@ def email_verification(first_name, email, token, current_site):
        msg.send()
        return
    except Exception as e:
+       # Print logs if in DEBUG mode
+       if settings.DEBUG:
+           print(e)
        capture_exception(e)
        return
346
apiserver/plane/bgtasks/export_task.py
Normal file
@@ -0,0 +1,346 @@
# Python imports
import csv
import io
import json
import boto3
import zipfile

# Django imports
from django.conf import settings
from django.utils import timezone

# Third party imports
from celery import shared_task
from sentry_sdk import capture_exception
from botocore.client import Config
from openpyxl import Workbook

# Module imports
from plane.db.models import Issue, ExporterHistory


def dateTimeConverter(time):
    if time:
        return time.strftime("%a, %d %b %Y %I:%M:%S %Z%z")

def dateConverter(time):
    if time:
        return time.strftime("%a, %d %b %Y")

def create_csv_file(data):
    csv_buffer = io.StringIO()
    csv_writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)

    for row in data:
        csv_writer.writerow(row)

    csv_buffer.seek(0)
    return csv_buffer.getvalue()


def create_json_file(data):
    return json.dumps(data)


def create_xlsx_file(data):
    workbook = Workbook()
    sheet = workbook.active

    for row in data:
        sheet.append(row)

    xlsx_buffer = io.BytesIO()
    workbook.save(xlsx_buffer)
    xlsx_buffer.seek(0)
    return xlsx_buffer.getvalue()


def create_zip_file(files):
    zip_buffer = io.BytesIO()
    with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zipf:
        for filename, file_content in files:
            zipf.writestr(filename, file_content)

    zip_buffer.seek(0)
    return zip_buffer


def upload_to_s3(zip_file, workspace_id, token_id, slug):
    s3 = boto3.client(
        "s3",
        region_name="ap-south-1",
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
        config=Config(signature_version="s3v4"),
    )
    file_name = f"{workspace_id}/export-{slug}-{token_id[:6]}-{timezone.now()}.zip"

    s3.upload_fileobj(
        zip_file,
        settings.AWS_S3_BUCKET_NAME,
        file_name,
        ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"},
    )

    expires_in = 7 * 24 * 60 * 60
    presigned_url = s3.generate_presigned_url(
        "get_object",
        Params={"Bucket": settings.AWS_S3_BUCKET_NAME, "Key": file_name},
        ExpiresIn=expires_in,
    )

    exporter_instance = ExporterHistory.objects.get(token=token_id)

    if presigned_url:
        exporter_instance.url = presigned_url
        exporter_instance.status = "completed"
        exporter_instance.key = file_name
    else:
        exporter_instance.status = "failed"

    exporter_instance.save(update_fields=["status", "url", "key"])


def generate_table_row(issue):
    return [
        f"""{issue["project__identifier"]}-{issue["sequence_id"]}""",
        issue["project__name"],
        issue["name"],
        issue["description_stripped"],
        issue["state__name"],
        issue["priority"],
        f"{issue['created_by__first_name']} {issue['created_by__last_name']}"
        if issue["created_by__first_name"] and issue["created_by__last_name"]
        else "",
        f"{issue['assignees__first_name']} {issue['assignees__last_name']}"
        if issue["assignees__first_name"] and issue["assignees__last_name"]
        else "",
        issue["labels__name"],
        issue["issue_cycle__cycle__name"],
        dateConverter(issue["issue_cycle__cycle__start_date"]),
        dateConverter(issue["issue_cycle__cycle__end_date"]),
        issue["issue_module__module__name"],
        dateConverter(issue["issue_module__module__start_date"]),
        dateConverter(issue["issue_module__module__target_date"]),
        dateTimeConverter(issue["created_at"]),
        dateTimeConverter(issue["updated_at"]),
        dateTimeConverter(issue["completed_at"]),
        dateTimeConverter(issue["archived_at"]),
    ]


def generate_json_row(issue):
    return {
        "ID": f"""{issue["project__identifier"]}-{issue["sequence_id"]}""",
        "Project": issue["project__name"],
        "Name": issue["name"],
        "Description": issue["description_stripped"],
        "State": issue["state__name"],
        "Priority": issue["priority"],
        "Created By": f"{issue['created_by__first_name']} {issue['created_by__last_name']}"
        if issue["created_by__first_name"] and issue["created_by__last_name"]
        else "",
        "Assignee": f"{issue['assignees__first_name']} {issue['assignees__last_name']}"
        if issue["assignees__first_name"] and issue["assignees__last_name"]
        else "",
        "Labels": issue["labels__name"],
        "Cycle Name": issue["issue_cycle__cycle__name"],
        "Cycle Start Date": dateConverter(issue["issue_cycle__cycle__start_date"]),
        "Cycle End Date": dateConverter(issue["issue_cycle__cycle__end_date"]),
        "Module Name": issue["issue_module__module__name"],
        "Module Start Date": dateConverter(issue["issue_module__module__start_date"]),
        "Module Target Date": dateConverter(issue["issue_module__module__target_date"]),
        "Created At": dateTimeConverter(issue["created_at"]),
        "Updated At": dateTimeConverter(issue["updated_at"]),
        "Completed At": dateTimeConverter(issue["completed_at"]),
        "Archived At": dateTimeConverter(issue["archived_at"]),
    }


def update_json_row(rows, row):
    matched_index = next(
        (
            index
            for index, existing_row in enumerate(rows)
            if existing_row["ID"] == row["ID"]
        ),
        None,
    )

    if matched_index is not None:
        existing_assignees, existing_labels = (
            rows[matched_index]["Assignee"],
            rows[matched_index]["Labels"],
        )
        assignee, label = row["Assignee"], row["Labels"]

        if assignee is not None and assignee not in existing_assignees:
            rows[matched_index]["Assignee"] += f", {assignee}"
        if label is not None and label not in existing_labels:
            rows[matched_index]["Labels"] += f", {label}"
    else:
        rows.append(row)


def update_table_row(rows, row):
    matched_index = next(
        (index for index, existing_row in enumerate(rows) if existing_row[0] == row[0]),
        None,
    )

    if matched_index is not None:
        existing_assignees, existing_labels = rows[matched_index][7:9]
        assignee, label = row[7:9]

        if assignee is not None and assignee not in existing_assignees:
            rows[matched_index][7] += f", {assignee}"
        if label is not None and label not in existing_labels:
            rows[matched_index][8] += f", {label}"
    else:
        rows.append(row)


def generate_csv(header, project_id, issues, files):
    """
    Generate CSV export for all the passed issues.
    """
    rows = [
        header,
    ]
    for issue in issues:
        row = generate_table_row(issue)
        update_table_row(rows, row)
    csv_file = create_csv_file(rows)
    files.append((f"{project_id}.csv", csv_file))


def generate_json(header, project_id, issues, files):
    rows = []
    for issue in issues:
        row = generate_json_row(issue)
        update_json_row(rows, row)
    json_file = create_json_file(rows)
    files.append((f"{project_id}.json", json_file))


def generate_xlsx(header, project_id, issues, files):
    rows = [header]
    for issue in issues:
        row = generate_table_row(issue)
        update_table_row(rows, row)
    xlsx_file = create_xlsx_file(rows)
    files.append((f"{project_id}.xlsx", xlsx_file))


@shared_task
def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, slug):
    try:
        exporter_instance = ExporterHistory.objects.get(token=token_id)
        exporter_instance.status = "processing"
        exporter_instance.save(update_fields=["status"])

        workspace_issues = (
            (
                Issue.objects.filter(
                    workspace__id=workspace_id, project_id__in=project_ids
                )
                .select_related("project", "workspace", "state", "parent", "created_by")
                .prefetch_related(
                    "assignees", "labels", "issue_cycle__cycle", "issue_module__module"
                )
                .values(
                    "id",
                    "project__identifier",
                    "project__name",
                    "project__id",
                    "sequence_id",
                    "name",
                    "description_stripped",
                    "priority",
                    "state__name",
                    "created_at",
                    "updated_at",
                    "completed_at",
                    "archived_at",
                    "issue_cycle__cycle__name",
                    "issue_cycle__cycle__start_date",
                    "issue_cycle__cycle__end_date",
                    "issue_module__module__name",
                    "issue_module__module__start_date",
                    "issue_module__module__target_date",
                    "created_by__first_name",
                    "created_by__last_name",
                    "assignees__first_name",
                    "assignees__last_name",
                    "labels__name",
                )
            )
            .order_by("project__identifier", "sequence_id")
            .distinct()
        )
        # CSV header
        header = [
            "ID",
            "Project",
            "Name",
            "Description",
            "State",
            "Priority",
            "Created By",
            "Assignee",
            "Labels",
            "Cycle Name",
            "Cycle Start Date",
            "Cycle End Date",
            "Module Name",
            "Module Start Date",
            "Module Target Date",
            "Created At",
            "Updated At",
            "Completed At",
            "Archived At",
        ]

        EXPORTER_MAPPER = {
            "csv": generate_csv,
            "json": generate_json,
            "xlsx": generate_xlsx,
        }

        files = []
        if multiple:
            for project_id in project_ids:
                issues = workspace_issues.filter(project__id=project_id)
                exporter = EXPORTER_MAPPER.get(provider)
                if exporter is not None:
                    exporter(
                        header,
                        project_id,
                        issues,
                        files,
                    )

        else:
            exporter = EXPORTER_MAPPER.get(provider)
            if exporter is not None:
                exporter(
                    header,
                    workspace_id,
                    workspace_issues,
                    files,
                )

        zip_buffer = create_zip_file(files)
        upload_to_s3(zip_buffer, workspace_id, token_id, slug)

    except Exception as e:
        exporter_instance = ExporterHistory.objects.get(token=token_id)
        exporter_instance.status = "failed"
        exporter_instance.reason = str(e)
        exporter_instance.save(update_fields=["status", "reason"])

        # Print logs if in DEBUG mode
        if settings.DEBUG:
            print(e)
        capture_exception(e)
        return
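For reference, a sketch of how issue_export_task might be kicked off after an ExporterHistory row is created. The caller is not shown in this diff, so the variables here are placeholders that just follow the task signature above:

# Hypothetical caller; placeholder variables, argument order per the signature above.
issue_export_task.delay(
    "csv",                          # provider: a key of EXPORTER_MAPPER
    str(workspace.id),              # workspace_id
    [str(p.id) for p in projects],  # project_ids
    str(exporter_history.token),    # token_id used to update ExporterHistory
    True,                           # multiple: emit one file per project
    workspace.slug,
)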
38
apiserver/plane/bgtasks/exporter_expired_task.py
Normal file
@@ -0,0 +1,38 @@
# Python imports
import boto3
from datetime import timedelta

# Django imports
from django.conf import settings
from django.utils import timezone
from django.db.models import Q

# Third party imports
from celery import shared_task
from botocore.client import Config

# Module imports
from plane.db.models import ExporterHistory


@shared_task
def delete_old_s3_link():
    # Get a list of keys and IDs to process
    expired_exporter_history = ExporterHistory.objects.filter(
        Q(url__isnull=False) & Q(created_at__lte=timezone.now() - timedelta(days=8))
    ).values_list("key", "id")

    s3 = boto3.client(
        "s3",
        region_name="ap-south-1",
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
        config=Config(signature_version="s3v4"),
    )

    for file_name, exporter_id in expired_exporter_history:
        # Delete object from S3
        if file_name:
            s3.delete_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key=file_name)

        ExporterHistory.objects.filter(id=exporter_id).update(url=None)
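delete_old_s3_link only runs if something schedules it. A plausible celery beat entry is sketched below; the real schedule lives in a settings module that is not part of this diff, so the dotted task path and timing are assumptions:

# Hypothetical beat schedule entry (settings module not shown in this diff).
from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    "delete-expired-export-links": {
        "task": "plane.bgtasks.exporter_expired_task.delete_old_s3_link",  # assumed path
        "schedule": crontab(hour=0, minute=0),  # daily, pairing with the 8-day cutoff above
    },
}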
@@ -16,12 +16,12 @@ from plane.db.models import User
def forgot_password(first_name, email, uidb64, token, current_site):

    try:
-       realtivelink = f"/email-verify/?uidb64={uidb64}&token={token}/"
-       abs_url = "http://" + current_site + realtivelink
+       realtivelink = f"/reset-password/?uidb64={uidb64}&token={token}"
+       abs_url = current_site + realtivelink

        from_email_string = settings.EMAIL_FROM

-       subject = f"Verify your Email!"
+       subject = f"Reset Your Password - Plane"

        context = {
            "first_name": first_name,
@@ -37,5 +37,8 @@ def forgot_password(first_name, email, uidb64, token, current_site):
        msg.send()
        return
    except Exception as e:
+       # Print logs if in DEBUG mode
+       if settings.DEBUG:
+           print(e)
        capture_exception(e)
        return
@@ -27,6 +27,7 @@ from plane.db.models import (
    User,
)
from .workspace_invitation_task import workspace_invitation
+from plane.bgtasks.user_welcome_task import send_welcome_slack


@shared_task
@@ -40,7 +41,7 @@ def service_importer(service, importer_id):

        # Check if we need to import users as well
        if len(users):
-           # For all invited users create the uers
+           # For all invited users create the users
            new_users = User.objects.bulk_create(
                [
                    User(
@@ -56,6 +57,15 @@ def service_importer(service, importer_id):
                ignore_conflicts=True,
            )

+           [
+               send_welcome_slack.delay(
+                   str(user.id),
+                   True,
+                   f"{user.email} was imported to Plane from {service}",
+               )
+               for user in new_users
+           ]
+
            workspace_users = User.objects.filter(
                email__in=[
                    user.get("email").strip().lower()
@@ -68,7 +78,11 @@ def service_importer(service, importer_id):
            # Add new users to Workspace and project automatically
            WorkspaceMember.objects.bulk_create(
                [
-                   WorkspaceMember(member=user, workspace_id=importer.workspace_id)
+                   WorkspaceMember(
+                       member=user,
+                       workspace_id=importer.workspace_id,
+                       created_by=importer.created_by,
+                   )
                    for user in workspace_users
                ],
                batch_size=100,
@@ -81,6 +95,7 @@ def service_importer(service, importer_id):
                        project_id=importer.project_id,
                        workspace_id=importer.workspace_id,
                        member=user,
+                       created_by=importer.created_by,
                    )
                    for user in workspace_users
                ],
@@ -160,5 +175,8 @@ def service_importer(service, importer_id):
        importer = Importer.objects.get(pk=importer_id)
        importer.status = "failed"
        importer.save()
+       # Print logs if in DEBUG mode
+       if settings.DEBUG:
+           print(e)
        capture_exception(e)
        return
@@ -5,6 +5,7 @@ import requests
# Django imports
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
+from django.utils import timezone

# Third Party imports
from celery import shared_task
@@ -20,6 +21,9 @@ from plane.db.models import (
    State,
    Cycle,
    Module,
+   IssueSubscriber,
+   Notification,
+   IssueAssignee,
)
from plane.api.serializers import IssueActivitySerializer
@@ -44,7 +48,7 @@ def track_name(
            field="name",
            project=project,
            workspace=project.workspace,
-           comment=f"{actor.email} updated the start date to {requested_data.get('name')}",
+           comment=f"updated the name to {requested_data.get('name')}",
        )
    )
@@ -66,12 +70,12 @@ def track_parent(
                issue_id=issue_id,
                actor=actor,
                verb="updated",
-               old_value=f"{project.identifier}-{old_parent.sequence_id}",
+               old_value=f"{old_parent.project.identifier}-{old_parent.sequence_id}",
                new_value=None,
                field="parent",
                project=project,
                workspace=project.workspace,
-               comment=f"{actor.email} updated the parent issue to None",
+               comment=f"updated the parent issue to None",
                old_identifier=old_parent.id,
                new_identifier=None,
            )
@@ -84,14 +88,14 @@ def track_parent(
                issue_id=issue_id,
                actor=actor,
                verb="updated",
-               old_value=f"{project.identifier}-{old_parent.sequence_id}"
+               old_value=f"{old_parent.project.identifier}-{old_parent.sequence_id}"
                if old_parent is not None
                else None,
-               new_value=f"{project.identifier}-{new_parent.sequence_id}",
+               new_value=f"{new_parent.project.identifier}-{new_parent.sequence_id}",
                field="parent",
                project=project,
                workspace=project.workspace,
-               comment=f"{actor.email} updated the parent issue to {new_parent.name}",
+               comment=f"updated the parent issue to {new_parent.name}",
                old_identifier=old_parent.id if old_parent is not None else None,
                new_identifier=new_parent.id,
            )
@@ -119,7 +123,7 @@ def track_priority(
                field="priority",
                project=project,
                workspace=project.workspace,
-               comment=f"{actor.email} updated the priority to None",
+               comment=f"updated the priority to None",
            )
        )
    else:
@@ -133,10 +137,9 @@ def track_priority(
                field="priority",
                project=project,
                workspace=project.workspace,
-               comment=f"{actor.email} updated the priority to {requested_data.get('priority')}",
+               comment=f"updated the priority to {requested_data.get('priority')}",
            )
        )
-   print(issue_activities)


# Track chnages in state of the issue
@@ -162,7 +165,7 @@ def track_state(
            field="state",
            project=project,
            workspace=project.workspace,
-           comment=f"{actor.email} updated the state to {new_state.name}",
+           comment=f"updated the state to {new_state.name}",
            old_identifier=old_state.id,
            new_identifier=new_state.id,
        )
@@ -181,19 +184,24 @@ def track_description(
    if current_instance.get("description_html") != requested_data.get(
        "description_html"
    ):
-       issue_activities.append(
-           IssueActivity(
-               issue_id=issue_id,
-               actor=actor,
-               verb="updated",
-               old_value=current_instance.get("description_html"),
-               new_value=requested_data.get("description_html"),
-               field="description",
-               project=project,
-               workspace=project.workspace,
-               comment=f"{actor.email} updated the description to {requested_data.get('description_html')}",
-           )
-       )
+       last_activity = IssueActivity.objects.filter(issue_id=issue_id).order_by("-created_at").first()
+       if(last_activity is not None and last_activity.field == "description" and actor.id == last_activity.actor_id):
+           last_activity.created_at = timezone.now()
+           last_activity.save(update_fields=["created_at"])
+       else:
+           issue_activities.append(
+               IssueActivity(
+                   issue_id=issue_id,
+                   actor=actor,
+                   verb="updated",
+                   old_value=current_instance.get("description_html"),
+                   new_value=requested_data.get("description_html"),
+                   field="description",
+                   project=project,
+                   workspace=project.workspace,
+                   comment=f"updated the description to {requested_data.get('description_html')}",
+               )
+           )


# Track changes in issue target date
@@ -217,7 +225,7 @@ def track_target_date(
                field="target_date",
                project=project,
                workspace=project.workspace,
-               comment=f"{actor.email} updated the target date to None",
+               comment=f"updated the target date to None",
            )
        )
    else:
@@ -231,7 +239,7 @@ def track_target_date(
                field="target_date",
                project=project,
                workspace=project.workspace,
-               comment=f"{actor.email} updated the target date to {requested_data.get('target_date')}",
+               comment=f"updated the target date to {requested_data.get('target_date')}",
            )
        )

@@ -257,7 +265,7 @@ def track_start_date(
                field="start_date",
                project=project,
                workspace=project.workspace,
-               comment=f"{actor.email} updated the start date to None",
+               comment=f"updated the start date to None",
            )
        )
    else:
@@ -271,7 +279,7 @@ def track_start_date(
                field="start_date",
                project=project,
                workspace=project.workspace,
-               comment=f"{actor.email} updated the start date to {requested_data.get('start_date')}",
+               comment=f"updated the start date to {requested_data.get('start_date')}",
            )
        )
@@ -300,7 +308,7 @@ def track_labels(
                    field="labels",
                    project=project,
                    workspace=project.workspace,
-                   comment=f"{actor.email} added label {label.name}",
+                   comment=f"added label {label.name}",
                    new_identifier=label.id,
                    old_identifier=None,
                )
@@ -321,7 +329,7 @@ def track_labels(
                    field="labels",
                    project=project,
                    workspace=project.workspace,
-                   comment=f"{actor.email} removed label {label.name}",
+                   comment=f"removed label {label.name}",
                    old_identifier=label.id,
                    new_identifier=None,
                )
@@ -350,12 +358,12 @@ def track_assignees(
                actor=actor,
                verb="updated",
                old_value="",
-               new_value=assignee.email,
+               new_value=assignee.display_name,
                field="assignees",
                project=project,
                workspace=project.workspace,
-               comment=f"{actor.email} added assignee {assignee.email}",
-               new_identifier=actor.id,
+               comment=f"added assignee {assignee.display_name}",
+               new_identifier=assignee.id,
            )
        )

@@ -371,13 +379,13 @@ def track_assignees(
                issue_id=issue_id,
                actor=actor,
                verb="updated",
-               old_value=assignee.email,
+               old_value=assignee.display_name,
                new_value="",
-               field="assignee",
+               field="assignees",
                project=project,
                workspace=project.workspace,
-               comment=f"{actor.email} removed assignee {assignee.email}",
-               old_identifier=actor.id,
+               comment=f"removed assignee {assignee.display_name}",
+               old_identifier=assignee.id,
            )
        )
@@ -412,11 +420,11 @@ def track_blocks(
                    actor=actor,
                    verb="updated",
                    old_value="",
-                   new_value=f"{project.identifier}-{issue.sequence_id}",
+                   new_value=f"{issue.project.identifier}-{issue.sequence_id}",
                    field="blocks",
                    project=project,
                    workspace=project.workspace,
-                   comment=f"{actor.email} added blocking issue {project.identifier}-{issue.sequence_id}",
+                   comment=f"added blocking issue {project.identifier}-{issue.sequence_id}",
                    new_identifier=issue.id,
                )
            )
@@ -433,12 +441,12 @@ def track_blocks(
                    issue_id=issue_id,
                    actor=actor,
                    verb="updated",
-                   old_value=f"{project.identifier}-{issue.sequence_id}",
+                   old_value=f"{issue.project.identifier}-{issue.sequence_id}",
                    new_value="",
                    field="blocks",
                    project=project,
                    workspace=project.workspace,
-                   comment=f"{actor.email} removed blocking issue {project.identifier}-{issue.sequence_id}",
+                   comment=f"removed blocking issue {project.identifier}-{issue.sequence_id}",
                    old_identifier=issue.id,
                )
            )
@@ -474,11 +482,11 @@ def track_blockings(
                    actor=actor,
                    verb="updated",
                    old_value="",
-                   new_value=f"{project.identifier}-{issue.sequence_id}",
+                   new_value=f"{issue.project.identifier}-{issue.sequence_id}",
                    field="blocking",
                    project=project,
                    workspace=project.workspace,
-                   comment=f"{actor.email} added blocked by issue {project.identifier}-{issue.sequence_id}",
+                   comment=f"added blocked by issue {project.identifier}-{issue.sequence_id}",
                    new_identifier=issue.id,
                )
            )
@@ -495,130 +503,17 @@ def track_blockings(
                    issue_id=issue_id,
                    actor=actor,
                    verb="updated",
-                   old_value=f"{project.identifier}-{issue.sequence_id}",
+                   old_value=f"{issue.project.identifier}-{issue.sequence_id}",
                    new_value="",
                    field="blocking",
                    project=project,
                    workspace=project.workspace,
-                   comment=f"{actor.email} removed blocked by issue {project.identifier}-{issue.sequence_id}",
+                   comment=f"removed blocked by issue {project.identifier}-{issue.sequence_id}",
                    old_identifier=issue.id,
                )
            )


-def track_cycles(
-    requested_data,
-    current_instance,
-    issue_id,
-    project,
-    actor,
-    issue_activities,
-):
-    # Updated Records:
-    updated_records = current_instance.get("updated_cycle_issues", [])
-    created_records = json.loads(current_instance.get("created_cycle_issues", []))
-
-    for updated_record in updated_records:
-        old_cycle = Cycle.objects.filter(
-            pk=updated_record.get("old_cycle_id", None)
-        ).first()
-        new_cycle = Cycle.objects.filter(
-            pk=updated_record.get("new_cycle_id", None)
-        ).first()
-
-        issue_activities.append(
-            IssueActivity(
-                issue_id=updated_record.get("issue_id"),
-                actor=actor,
-                verb="updated",
-                old_value=old_cycle.name,
-                new_value=new_cycle.name,
-                field="cycles",
-                project=project,
-                workspace=project.workspace,
-                comment=f"{actor.email} updated cycle from {old_cycle.name} to {new_cycle.name}",
-                old_identifier=old_cycle.id,
-                new_identifier=new_cycle.id,
-            )
-        )
-
-    for created_record in created_records:
-        cycle = Cycle.objects.filter(
-            pk=created_record.get("fields").get("cycle")
-        ).first()
-
-        issue_activities.append(
-            IssueActivity(
-                issue_id=created_record.get("fields").get("issue"),
-                actor=actor,
-                verb="created",
-                old_value="",
-                new_value=cycle.name,
-                field="cycles",
-                project=project,
-                workspace=project.workspace,
-                comment=f"{actor.email} added cycle {cycle.name}",
-                new_identifier=cycle.id,
-            )
-        )
-
-
-def track_modules(
-    requested_data,
-    current_instance,
-    issue_id,
-    project,
-    actor,
-    issue_activities,
-):
-    # Updated Records:
-    updated_records = current_instance.get("updated_module_issues", [])
-    created_records = json.loads(current_instance.get("created_module_issues", []))
-
-    for updated_record in updated_records:
-        old_module = Module.objects.filter(
-            pk=updated_record.get("old_module_id", None)
-        ).first()
-        new_module = Module.objects.filter(
-            pk=updated_record.get("new_module_id", None)
-        ).first()
-
-        issue_activities.append(
-            IssueActivity(
-                issue_id=updated_record.get("issue_id"),
-                actor=actor,
-                verb="updated",
-                old_value=old_module.name,
-                new_value=new_module.name,
-                field="modules",
-                project=project,
-                workspace=project.workspace,
-                comment=f"{actor.email} updated module from {old_module.name} to {new_module.name}",
-                old_identifier=old_module.id,
-                new_identifier=new_module.id,
-            )
-        )
-
-    for created_record in created_records:
-        module = Module.objects.filter(
-            pk=created_record.get("fields").get("module")
-        ).first()
-        issue_activities.append(
-            IssueActivity(
-                issue_id=created_record.get("fields").get("issue"),
-                actor=actor,
-                verb="created",
-                old_value="",
-                new_value=module.name,
-                field="modules",
-                project=project,
-                workspace=project.workspace,
-                comment=f"{actor.email} added module {module.name}",
-                new_identifier=module.id,
-            )
-        )


def create_issue_activity(
    requested_data, current_instance, issue_id, project, actor, issue_activities
):
@@ -627,7 +522,7 @@ def create_issue_activity(
            issue_id=issue_id,
            project=project,
            workspace=project.workspace,
-           comment=f"{actor.email} created the issue",
+           comment=f"created the issue",
            verb="created",
            actor=actor,
        )
@@ -649,7 +544,7 @@ def track_estimate_points(
                field="estimate_point",
                project=project,
                workspace=project.workspace,
-               comment=f"{actor.email} updated the estimate point to None",
+               comment=f"updated the estimate point to None",
            )
        )
    else:
@@ -663,11 +558,69 @@ def track_estimate_points(
                field="estimate_point",
                project=project,
                workspace=project.workspace,
-               comment=f"{actor.email} updated the estimate point to {requested_data.get('estimate_point')}",
+               comment=f"updated the estimate point to {requested_data.get('estimate_point')}",
            )
        )


+def track_archive_at(
+    requested_data, current_instance, issue_id, project, actor, issue_activities
+):
+    if requested_data.get("archived_at") is None:
+        issue_activities.append(
+            IssueActivity(
+                issue_id=issue_id,
+                project=project,
+                workspace=project.workspace,
+                comment=f"has restored the issue",
+                verb="updated",
+                actor=actor,
+                field="archived_at",
+                old_value="archive",
+                new_value="restore",
+            )
+        )
+    else:
+        issue_activities.append(
+            IssueActivity(
+                issue_id=issue_id,
+                project=project,
+                workspace=project.workspace,
+                comment=f"Plane has archived the issue",
+                verb="updated",
+                actor=actor,
+                field="archived_at",
+                old_value=None,
+                new_value="archive",
+            )
+        )
+
+
+def track_closed_to(
+    requested_data, current_instance, issue_id, project, actor, issue_activities
+):
+    if requested_data.get("closed_to") is not None:
+        updated_state = State.objects.get(
+            pk=requested_data.get("closed_to"), project=project
+        )
+
+        issue_activities.append(
+            IssueActivity(
+                issue_id=issue_id,
+                actor=actor,
+                verb="updated",
+                old_value=None,
+                new_value=updated_state.name,
+                field="state",
+                project=project,
+                workspace=project.workspace,
+                comment=f"Plane updated the state to {updated_state.name}",
+                old_identifier=None,
+                new_identifier=updated_state.id,
+            )
+        )
+
+
def update_issue_activity(
    requested_data, current_instance, issue_id, project, actor, issue_activities
):
@@ -683,9 +636,9 @@ def update_issue_activity(
        "assignees_list": track_assignees,
        "blocks_list": track_blocks,
        "blockers_list": track_blockings,
-       "cycles_list": track_cycles,
-       "modules_list": track_modules,
        "estimate_point": track_estimate_points,
+       "archived_at": track_archive_at,
+       "closed_to": track_closed_to,
    }

    requested_data = json.loads(requested_data) if requested_data is not None else None
@@ -713,7 +666,7 @@ def delete_issue_activity(
        IssueActivity(
            project=project,
            workspace=project.workspace,
-           comment=f"{actor.email} deleted the issue",
+           comment=f"deleted the issue",
            verb="deleted",
            actor=actor,
            field="issue",
@@ -734,7 +687,7 @@ def create_comment_activity(
            issue_id=issue_id,
            project=project,
            workspace=project.workspace,
-           comment=f"{actor.email} created a comment",
+           comment=f"created a comment",
            verb="created",
            actor=actor,
            field="comment",
@@ -759,7 +712,7 @@ def update_comment_activity(
            issue_id=issue_id,
            project=project,
            workspace=project.workspace,
-           comment=f"{actor.email} updated a comment",
+           comment=f"updated a comment",
            verb="updated",
            actor=actor,
            field="comment",
@@ -780,7 +733,7 @@ def delete_comment_activity(
            issue_id=issue_id,
            project=project,
            workspace=project.workspace,
-           comment=f"{actor.email} deleted the comment",
+           comment=f"deleted the comment",
            verb="deleted",
            actor=actor,
            field="comment",
@@ -788,6 +741,177 @@ def delete_comment_activity(
        )
    )


+def create_cycle_issue_activity(
+    requested_data, current_instance, issue_id, project, actor, issue_activities
+):
+    requested_data = json.loads(requested_data) if requested_data is not None else None
+    current_instance = (
+        json.loads(current_instance) if current_instance is not None else None
+    )
+
+    # Updated Records:
+    updated_records = current_instance.get("updated_cycle_issues", [])
+    created_records = json.loads(current_instance.get("created_cycle_issues", []))
+
+    for updated_record in updated_records:
+        old_cycle = Cycle.objects.filter(
+            pk=updated_record.get("old_cycle_id", None)
+        ).first()
+        new_cycle = Cycle.objects.filter(
+            pk=updated_record.get("new_cycle_id", None)
+        ).first()
+
+        issue_activities.append(
+            IssueActivity(
+                issue_id=updated_record.get("issue_id"),
+                actor=actor,
+                verb="updated",
+                old_value=old_cycle.name,
+                new_value=new_cycle.name,
+                field="cycles",
+                project=project,
+                workspace=project.workspace,
+                comment=f"updated cycle from {old_cycle.name} to {new_cycle.name}",
+                old_identifier=old_cycle.id,
+                new_identifier=new_cycle.id,
+            )
+        )
+
+    for created_record in created_records:
+        cycle = Cycle.objects.filter(
+            pk=created_record.get("fields").get("cycle")
+        ).first()
+
+        issue_activities.append(
+            IssueActivity(
+                issue_id=created_record.get("fields").get("issue"),
+                actor=actor,
+                verb="created",
+                old_value="",
+                new_value=cycle.name,
+                field="cycles",
+                project=project,
+                workspace=project.workspace,
+                comment=f"added cycle {cycle.name}",
+                new_identifier=cycle.id,
+            )
+        )
+
+
+def delete_cycle_issue_activity(
+    requested_data, current_instance, issue_id, project, actor, issue_activities
+):
+    requested_data = json.loads(requested_data) if requested_data is not None else None
+    current_instance = (
+        json.loads(current_instance) if current_instance is not None else None
+    )
+
+    cycle_id = requested_data.get("cycle_id", "")
+    cycle = Cycle.objects.filter(pk=cycle_id).first()
+    issues = requested_data.get("issues")
+
+    for issue in issues:
+        issue_activities.append(
+            IssueActivity(
+                issue_id=issue,
+                actor=actor,
+                verb="deleted",
+                old_value=cycle.name if cycle is not None else "",
+                new_value="",
+                field="cycles",
+                project=project,
+                workspace=project.workspace,
+                comment=f"removed this issue from {cycle.name if cycle is not None else None}",
+                old_identifier=cycle.id if cycle is not None else None,
+            )
+        )
+
+
+def create_module_issue_activity(
+    requested_data, current_instance, issue_id, project, actor, issue_activities
+):
+    requested_data = json.loads(requested_data) if requested_data is not None else None
+    current_instance = (
+        json.loads(current_instance) if current_instance is not None else None
+    )
+
+    # Updated Records:
+    updated_records = current_instance.get("updated_module_issues", [])
+    created_records = json.loads(current_instance.get("created_module_issues", []))
+
+    for updated_record in updated_records:
+        old_module = Module.objects.filter(
+            pk=updated_record.get("old_module_id", None)
+        ).first()
+        new_module = Module.objects.filter(
+            pk=updated_record.get("new_module_id", None)
+        ).first()
+
+        issue_activities.append(
+            IssueActivity(
+                issue_id=updated_record.get("issue_id"),
+                actor=actor,
+                verb="updated",
+                old_value=old_module.name,
+                new_value=new_module.name,
+                field="modules",
+                project=project,
+                workspace=project.workspace,
+                comment=f"updated module from {old_module.name} to {new_module.name}",
+                old_identifier=old_module.id,
+                new_identifier=new_module.id,
+            )
+        )
+
+    for created_record in created_records:
+        module = Module.objects.filter(
+            pk=created_record.get("fields").get("module")
+        ).first()
+        issue_activities.append(
+            IssueActivity(
+                issue_id=created_record.get("fields").get("issue"),
+                actor=actor,
+                verb="created",
+                old_value="",
+                new_value=module.name,
+                field="modules",
+                project=project,
+                workspace=project.workspace,
+                comment=f"added module {module.name}",
+                new_identifier=module.id,
+            )
+        )
+
+
+def delete_module_issue_activity(
+    requested_data, current_instance, issue_id, project, actor, issue_activities
+):
+    requested_data = json.loads(requested_data) if requested_data is not None else None
+    current_instance = (
+        json.loads(current_instance) if current_instance is not None else None
+    )
+
+    module_id = requested_data.get("module_id", "")
+    module = Module.objects.filter(pk=module_id).first()
+    issues = requested_data.get("issues")
+
+    for issue in issues:
+        issue_activities.append(
+            IssueActivity(
+                issue_id=issue,
+                actor=actor,
+                verb="deleted",
+                old_value=module.name if module is not None else "",
+                new_value="",
+                field="modules",
+                project=project,
+                workspace=project.workspace,
+                comment=f"removed this issue from {module.name if module is not None else None}",
+                old_identifier=module.id if module is not None else None,
+            )
+        )
+
+
def create_link_activity(
    requested_data, current_instance, issue_id, project, actor, issue_activities
):
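For clarity, the serialized payloads these new handlers parse would look roughly like the sketch below; the shapes are inferred only from the .get(...) calls in the functions above, not from the producing views, which are outside this diff:

import json

# Inferred shape for delete_cycle_issue_activity
requested_data = json.dumps({"cycle_id": "<cycle-uuid>", "issues": ["<issue-uuid>"]})

# Inferred shape for create_cycle_issue_activity; note that
# "created_cycle_issues" is itself a JSON string and is json.loads-ed a second time.
current_instance = json.dumps({
    "updated_cycle_issues": [
        {"issue_id": "<issue-uuid>", "old_cycle_id": "<uuid>", "new_cycle_id": "<uuid>"}
    ],
    "created_cycle_issues": "[]",
})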
@@ -801,7 +925,7 @@ def create_link_activity(
            issue_id=issue_id,
            project=project,
            workspace=project.workspace,
-           comment=f"{actor.email} created a link",
+           comment=f"created a link",
            verb="created",
            actor=actor,
            field="link",
@@ -825,7 +949,7 @@ def update_link_activity(
            issue_id=issue_id,
            project=project,
            workspace=project.workspace,
-           comment=f"{actor.email} updated a link",
+           comment=f"updated a link",
            verb="updated",
            actor=actor,
            field="link",
@@ -840,15 +964,22 @@ def update_link_activity(
def delete_link_activity(
    requested_data, current_instance, issue_id, project, actor, issue_activities
):
+
+   current_instance = (
+       json.loads(current_instance) if current_instance is not None else None
+   )
+
    issue_activities.append(
        IssueActivity(
            issue_id=issue_id,
            project=project,
            workspace=project.workspace,
-           comment=f"{actor.email} deleted the link",
+           comment=f"deleted the link",
            verb="deleted",
            actor=actor,
            field="link",
+           old_value=current_instance.get("url", ""),
+           new_value=""
        )
    )
@@ -866,11 +997,11 @@ def create_attachment_activity(
            issue_id=issue_id,
            project=project,
            workspace=project.workspace,
-           comment=f"{actor.email} created an attachment",
+           comment=f"created an attachment",
            verb="created",
            actor=actor,
            field="attachment",
-           new_value=current_instance.get("access", ""),
+           new_value=current_instance.get("asset", ""),
            new_identifier=current_instance.get("id", None),
        )
    )
@@ -884,7 +1015,7 @@ def delete_attachment_activity(
            issue_id=issue_id,
            project=project,
            workspace=project.workspace,
-           comment=f"{actor.email} deleted the attachment",
+           comment=f"deleted the attachment",
            verb="deleted",
            actor=actor,
            field="attachment",
@@ -895,7 +1026,13 @@ def delete_attachment_activity(
# Receive message from room group
@shared_task
def issue_activity(
-   type, requested_data, current_instance, issue_id, actor_id, project_id
+   type,
+   requested_data,
+   current_instance,
+   issue_id,
+   actor_id,
+   project_id,
+   subscriber=True,
):
    try:
        issue_activities = []
@@ -903,6 +1040,30 @@ def issue_activity(
        actor = User.objects.get(pk=actor_id)
        project = Project.objects.get(pk=project_id)

+       if type not in [
+           "cycle.activity.created",
+           "cycle.activity.deleted",
+           "module.activity.created",
+           "module.activity.deleted",
+       ]:
+           issue = Issue.objects.filter(pk=issue_id).first()
+
+           if issue is not None:
+               try:
+                   issue.updated_at = timezone.now()
+                   issue.save(update_fields=["updated_at"])
+               except Exception as e:
+                   pass
+
+           if subscriber:
+               # add the user to issue subscriber
+               try:
+                   _ = IssueSubscriber.objects.get_or_create(
+                       issue_id=issue_id, subscriber=actor
+                   )
+               except Exception as e:
+                   pass
+
        ACTIVITY_MAPPER = {
            "issue.activity.created": create_issue_activity,
            "issue.activity.updated": update_issue_activity,
@@ -910,6 +1071,10 @@ def issue_activity(
            "comment.activity.created": create_comment_activity,
            "comment.activity.updated": update_comment_activity,
            "comment.activity.deleted": delete_comment_activity,
+           "cycle.activity.created": create_cycle_issue_activity,
+           "cycle.activity.deleted": delete_cycle_issue_activity,
+           "module.activity.created": create_module_issue_activity,
+           "module.activity.deleted": delete_module_issue_activity,
            "link.activity.created": create_link_activity,
            "link.activity.updated": update_link_activity,
            "link.activity.deleted": delete_link_activity,
@@ -933,19 +1098,101 @@ def issue_activity(
|
||||
# Post the updates to segway for integrations and webhooks
|
||||
if len(issue_activities_created):
|
||||
# Don't send activities if the actor is a bot
|
||||
if settings.PROXY_BASE_URL:
|
||||
try:
|
||||
if settings.PROXY_BASE_URL:
|
||||
for issue_activity in issue_activities_created:
|
||||
headers = {"Content-Type": "application/json"}
|
||||
issue_activity_json = json.dumps(
|
||||
IssueActivitySerializer(issue_activity).data,
|
||||
cls=DjangoJSONEncoder,
|
||||
)
|
||||
_ = requests.post(
|
||||
f"{settings.PROXY_BASE_URL}/hooks/workspaces/{str(issue_activity.workspace_id)}/projects/{str(issue_activity.project_id)}/issues/{str(issue_activity.issue_id)}/issue-activity-hooks/",
|
||||
json=issue_activity_json,
|
||||
headers=headers,
|
||||
)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
|
||||
if type not in [
|
||||
"cycle.activity.created",
|
||||
"cycle.activity.deleted",
|
||||
"module.activity.created",
|
||||
"module.activity.deleted",
|
||||
]:
|
||||
# Create Notifications
|
||||
bulk_notifications = []
|
||||
|
||||
issue_subscribers = list(
|
||||
IssueSubscriber.objects.filter(project=project, issue_id=issue_id)
|
||||
.exclude(subscriber_id=actor_id)
|
||||
.values_list("subscriber", flat=True)
|
||||
)
|
||||
|
||||
issue_assignees = list(
|
||||
IssueAssignee.objects.filter(project=project, issue_id=issue_id)
|
||||
.exclude(assignee_id=actor_id)
|
||||
.values_list("assignee", flat=True)
|
||||
)
|
||||
|
||||
issue_subscribers = issue_subscribers + issue_assignees
|
||||
|
||||
issue = Issue.objects.filter(pk=issue_id).first()
|
||||
|
||||
# Add bot filtering
|
||||
if (
|
||||
issue is not None
|
||||
and issue.created_by_id is not None
|
||||
and not issue.created_by.is_bot
|
||||
and str(issue.created_by_id) != str(actor_id)
|
||||
):
|
||||
issue_subscribers = issue_subscribers + [issue.created_by_id]
|
||||
|
||||
for subscriber in issue_subscribers:
|
||||
for issue_activity in issue_activities_created:
|
||||
headers = {"Content-Type": "application/json"}
|
||||
issue_activity_json = json.dumps(
|
||||
IssueActivitySerializer(issue_activity).data,
|
||||
cls=DjangoJSONEncoder,
|
||||
)
|
||||
_ = requests.post(
|
||||
f"{settings.PROXY_BASE_URL}/hooks/workspaces/{str(issue_activity.workspace_id)}/projects/{str(issue_activity.project_id)}/issues/{str(issue_activity.issue_id)}/issue-activity-hooks/",
|
||||
json=issue_activity_json,
|
||||
headers=headers,
|
||||
bulk_notifications.append(
|
||||
Notification(
|
||||
workspace=project.workspace,
|
||||
sender="in_app:issue_activities",
|
||||
triggered_by_id=actor_id,
|
||||
receiver_id=subscriber,
|
||||
entity_identifier=issue_id,
|
||||
entity_name="issue",
|
||||
project=project,
|
||||
title=issue_activity.comment,
|
||||
data={
|
||||
"issue": {
|
||||
"id": str(issue_id),
|
||||
"name": str(issue.name),
|
||||
"identifier": str(issue.project.identifier),
|
||||
"sequence_id": issue.sequence_id,
|
||||
"state_name": issue.state.name,
|
||||
"state_group": issue.state.group,
|
||||
},
|
||||
"issue_activity": {
|
||||
"id": str(issue_activity.id),
|
||||
"verb": str(issue_activity.verb),
|
||||
"field": str(issue_activity.field),
|
||||
"actor": str(issue_activity.actor_id),
|
||||
"new_value": str(issue_activity.new_value),
|
||||
"old_value": str(issue_activity.old_value),
|
||||
"issue_comment": str(
|
||||
issue_activity.issue_comment.comment_stripped
|
||||
if issue_activity.issue_comment is not None
|
||||
else ""
|
||||
),
|
||||
},
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
# Bulk create notifications
|
||||
Notification.objects.bulk_create(bulk_notifications, batch_size=100)
|
||||
|
||||
return
|
||||
except Exception as e:
|
||||
# Print logs if in DEBUG mode
|
||||
if settings.DEBUG:
|
||||
print(e)
|
||||
capture_exception(e)
|
||||
return
|
||||
|
||||
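Note: the new `subscriber` keyword above lets internal callers skip the auto-subscribe step. A minimal sketch of how a caller might dispatch the task with it (only the argument names come from the diff above; the payload shown is hypothetical):

    issue_activity.delay(
        type="issue.activity.updated",
        requested_data=json.dumps({"priority": "urgent"}),  # hypothetical payload
        current_instance=None,
        issue_id=str(issue.id),
        actor_id=str(actor.id),
        project_id=str(project.id),
        subscriber=False,  # don't add the actor to IssueSubscriber
    )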
157  apiserver/plane/bgtasks/issue_automation_task.py  Normal file
@@ -0,0 +1,157 @@
# Python imports
import json
from datetime import timedelta

# Django imports
from django.utils import timezone
from django.db.models import Q
from django.conf import settings

# Third party imports
from celery import shared_task
from sentry_sdk import capture_exception

# Module imports
from plane.db.models import Issue, Project, State
from plane.bgtasks.issue_activites_task import issue_activity


@shared_task
def archive_and_close_old_issues():
    archive_old_issues()
    close_old_issues()


def archive_old_issues():
    try:
        # Get all the projects whose archive_in is greater than 0
        projects = Project.objects.filter(archive_in__gt=0)

        for project in projects:
            project_id = project.id
            archive_in = project.archive_in

            # Get all the issues whose updated_at in less that the archive_in month
            issues = Issue.objects.filter(
                Q(
                    project=project_id,
                    archived_at__isnull=True,
                    updated_at__lte=(timezone.now() - timedelta(days=archive_in * 30)),
                    state__group__in=["completed", "cancelled"],
                ),
                Q(issue_cycle__isnull=True)
                | (
                    Q(issue_cycle__cycle__end_date__lt=timezone.now().date())
                    & Q(issue_cycle__isnull=False)
                ),
                Q(issue_module__isnull=True)
                | (
                    Q(issue_module__module__target_date__lt=timezone.now().date())
                    & Q(issue_module__isnull=False)
                ),
            ).filter(
                Q(issue_inbox__status=1)
                | Q(issue_inbox__status=-1)
                | Q(issue_inbox__status=2)
                | Q(issue_inbox__isnull=True)
            )

            # Check if Issues
            if issues:
                issues_to_update = []
                for issue in issues:
                    issue.archived_at = timezone.now()
                    issues_to_update.append(issue)

                # Bulk Update the issues and log the activity
                Issue.objects.bulk_update(
                    issues_to_update, ["archived_at"], batch_size=100
                )
                [
                    issue_activity.delay(
                        type="issue.activity.updated",
                        requested_data=json.dumps({"archived_at": str(issue.archived_at)}),
                        actor_id=str(project.created_by_id),
                        issue_id=issue.id,
                        project_id=project_id,
                        current_instance=None,
                        subscriber=False,
                    )
                    for issue in issues_to_update
                ]
        return
    except Exception as e:
        if settings.DEBUG:
            print(e)
        capture_exception(e)
        return


def close_old_issues():
    try:
        # Get all the projects whose close_in is greater than 0
        projects = Project.objects.filter(close_in__gt=0).select_related(
            "default_state"
        )

        for project in projects:
            project_id = project.id
            close_in = project.close_in

            # Get all the issues whose updated_at in less that the close_in month
            issues = Issue.objects.filter(
                Q(
                    project=project_id,
                    archived_at__isnull=True,
                    updated_at__lte=(timezone.now() - timedelta(days=close_in * 30)),
                    state__group__in=["backlog", "unstarted", "started"],
                ),
                Q(issue_cycle__isnull=True)
                | (
                    Q(issue_cycle__cycle__end_date__lt=timezone.now().date())
                    & Q(issue_cycle__isnull=False)
                ),
                Q(issue_module__isnull=True)
                | (
                    Q(issue_module__module__target_date__lt=timezone.now().date())
                    & Q(issue_module__isnull=False)
                ),
            ).filter(
                Q(issue_inbox__status=1)
                | Q(issue_inbox__status=-1)
                | Q(issue_inbox__status=2)
                | Q(issue_inbox__isnull=True)
            )

            # Check if Issues
            if issues:
                if project.default_state is None:
                    close_state = State.objects.filter(group="cancelled").first()
                else:
                    close_state = project.default_state

                issues_to_update = []
                for issue in issues:
                    issue.state = close_state
                    issues_to_update.append(issue)

                # Bulk Update the issues and log the activity
                Issue.objects.bulk_update(issues_to_update, ["state"], batch_size=100)
                [
                    issue_activity.delay(
                        type="issue.activity.updated",
                        requested_data=json.dumps({"closed_to": str(issue.state_id)}),
                        actor_id=str(project.created_by_id),
                        issue_id=issue.id,
                        project_id=project_id,
                        current_instance=None,
                        subscriber=False,
                    )
                    for issue in issues_to_update
                ]
        return
    except Exception as e:
        if settings.DEBUG:
            print(e)
        capture_exception(e)
        return
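Note: `archive_in` and `close_in` are stored in months but applied as 30-day blocks, so the cutoff drifts slightly from calendar months. A standalone sketch of the cutoff arithmetic used above (assuming a 3-month setting):

    from datetime import timedelta
    from django.utils import timezone

    archive_in = 3  # months, per Project.archive_in
    cutoff = timezone.now() - timedelta(days=archive_in * 30)  # ~3 months ago
    # issues with updated_at <= cutoff (and a completed/cancelled state) qualify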
@@ -13,7 +13,7 @@ from sentry_sdk import capture_exception
def magic_link(email, key, token, current_site):
    try:
        realtivelink = f"/magic-sign-in/?password={token}&key={key}"
        abs_url = "http://" + current_site + realtivelink
        abs_url = current_site + realtivelink

        from_email_string = settings.EMAIL_FROM

@@ -31,4 +31,7 @@ def magic_link(email, key, token, current_site):
        return
    except Exception as e:
        capture_exception(e)
        # Print logs if in DEBUG mode
        if settings.DEBUG:
            print(e)
        return

@@ -21,7 +21,7 @@ def project_invitation(email, project_id, token, current_site):
        )

        relativelink = f"/project-member-invitation/{project_member_invite.id}"
        abs_url = "http://" + current_site + relativelink
        abs_url = current_site + relativelink

        from_email_string = settings.EMAIL_FROM

@@ -50,5 +50,8 @@ def project_invitation(email, project_id, token, current_site):
    except (Project.DoesNotExist, ProjectMemberInvite.DoesNotExist) as e:
        return
    except Exception as e:
        # Print logs if in DEBUG mode
        if settings.DEBUG:
            print(e)
        capture_exception(e)
        return

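Note: both email tasks now build `abs_url` without the hard-coded `"http://"` prefix, so `current_site` is expected to carry its own scheme. Sketch (the URL value is hypothetical):

    current_site = "https://app.example.com"  # must now include the scheme
    abs_url = current_site + relativelink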
36  apiserver/plane/bgtasks/user_welcome_task.py  Normal file
@@ -0,0 +1,36 @@
# Django imports
from django.conf import settings

# Third party imports
from celery import shared_task
from sentry_sdk import capture_exception
from slack_sdk import WebClient
from slack_sdk.errors import SlackApiError

# Module imports
from plane.db.models import User


@shared_task
def send_welcome_slack(user_id, created, message):
    try:
        instance = User.objects.get(pk=user_id)

        if created and not instance.is_bot:
            # Send message on slack as well
            if settings.SLACK_BOT_TOKEN:
                client = WebClient(token=settings.SLACK_BOT_TOKEN)
                try:
                    _ = client.chat_postMessage(
                        channel="#trackers",
                        text=message,
                    )
                except SlackApiError as e:
                    print(f"Got an error: {e.response['error']}")
        return
    except Exception as e:
        # Print logs if in DEBUG mode
        if settings.DEBUG:
            print(e)
        capture_exception(e)
        return
@@ -23,9 +23,9 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor):
        )

        realtivelink = (
            f"/workspace-member-invitation/{workspace_member_invite.id}?email={email}"
            f"/workspace-member-invitation/?invitation_id={workspace_member_invite.id}&email={email}"
        )
        abs_url = "http://" + current_site + realtivelink
        abs_url = current_site + realtivelink

        from_email_string = settings.EMAIL_FROM

@@ -66,5 +66,8 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor):
    except (Workspace.DoesNotExist, WorkspaceMemberInvite.DoesNotExist) as e:
        return
    except Exception as e:
        # Print logs if in DEBUG mode
        if settings.DEBUG:
            print(e)
        capture_exception(e)
        return

@@ -1,6 +1,7 @@
import os
from celery import Celery
from plane.settings.redis import redis_instance
from celery.schedules import crontab

# Set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")
@@ -13,5 +14,19 @@ app = Celery("plane")
# pickle the object when using Windows.
app.config_from_object("django.conf:settings", namespace="CELERY")

app.conf.beat_schedule = {
    # Executes every day at 12 AM
    "check-every-day-to-archive-and-close": {
        "task": "plane.bgtasks.issue_automation_task.archive_and_close_old_issues",
        "schedule": crontab(hour=0, minute=0),
    },
    "check-every-day-to-delete_exporter_history": {
        "task": "plane.bgtasks.exporter_expired_task.delete_old_s3_link",
        "schedule": crontab(hour=0, minute=0),
    },
}

# Load task modules from all registered Django app configs.
app.autodiscover_tasks()

app.conf.beat_scheduler = 'django_celery_beat.schedulers.DatabaseScheduler'
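Note: `crontab(hour=0, minute=0)` is Celery's cron-style schedule, equivalent to `0 0 * * *` (once a day at midnight in the configured timezone). A quick sketch of a variant (hypothetical, not part of the diff):

    from celery.schedules import crontab

    every_monday_midnight = crontab(hour=0, minute=0, day_of_week="monday")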
58  apiserver/plane/db/migrations/0029_auto_20230502_0126.py  Normal file
@@ -0,0 +1,58 @@
# Generated by Django 3.2.18 on 2023-05-01 19:56

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid


class Migration(migrations.Migration):

    dependencies = [
        ('db', '0028_auto_20230414_1703'),
    ]

    operations = [
        migrations.AddField(
            model_name='cycle',
            name='view_props',
            field=models.JSONField(default=dict),
        ),
        migrations.AddField(
            model_name='importer',
            name='imported_data',
            field=models.JSONField(null=True),
        ),
        migrations.AddField(
            model_name='module',
            name='view_props',
            field=models.JSONField(default=dict),
        ),
        migrations.CreateModel(
            name='SlackProjectSync',
            fields=[
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
                ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
                ('access_token', models.CharField(max_length=300)),
                ('scopes', models.TextField()),
                ('bot_user_id', models.CharField(max_length=50)),
                ('webhook_url', models.URLField(max_length=1000)),
                ('data', models.JSONField(default=dict)),
                ('team_id', models.CharField(max_length=30)),
                ('team_name', models.CharField(max_length=300)),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='slackprojectsync_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
                ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_slackprojectsync', to='db.project')),
                ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='slackprojectsync_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
                ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_slackprojectsync', to='db.workspace')),
                ('workspace_integration', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='slack_syncs', to='db.workspaceintegration')),
            ],
            options={
                'verbose_name': 'Slack Project Sync',
                'verbose_name_plural': 'Slack Project Syncs',
                'db_table': 'slack_project_syncs',
                'ordering': ('-created_at',),
                'unique_together': {('team_id', 'project')},
            },
        ),
    ]
@@ -0,0 +1,17 @@
# Generated by Django 3.2.18 on 2023-05-05 14:17

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('db', '0029_auto_20230502_0126'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='estimatepoint',
            unique_together=set(),
        ),
    ]
37  apiserver/plane/db/migrations/0031_analyticview.py  Normal file
@@ -0,0 +1,37 @@
# Generated by Django 3.2.18 on 2023-05-12 11:31

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid


class Migration(migrations.Migration):

    dependencies = [
        ('db', '0030_alter_estimatepoint_unique_together'),
    ]

    operations = [
        migrations.CreateModel(
            name='AnalyticView',
            fields=[
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
                ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
                ('name', models.CharField(max_length=255)),
                ('description', models.TextField(blank=True)),
                ('query', models.JSONField()),
                ('query_dict', models.JSONField(default=dict)),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='analyticview_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
                ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='analyticview_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
                ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analytics', to='db.workspace')),
            ],
            options={
                'verbose_name': 'Analytic',
                'verbose_name_plural': 'Analytics',
                'db_table': 'analytic_views',
                'ordering': ('-created_at',),
            },
        ),
    ]
23  apiserver/plane/db/migrations/0032_auto_20230520_2015.py  Normal file
@@ -0,0 +1,23 @@
# Generated by Django 3.2.19 on 2023-05-20 14:45

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('db', '0031_analyticview'),
    ]

    operations = [
        migrations.RenameField(
            model_name='project',
            old_name='icon',
            new_name='emoji',
        ),
        migrations.AddField(
            model_name='project',
            name='icon_prop',
            field=models.JSONField(null=True),
        ),
    ]
83  apiserver/plane/db/migrations/0033_auto_20230618_2125.py  Normal file
@@ -0,0 +1,83 @@
# Generated by Django 3.2.19 on 2023-06-18 15:55

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid


class Migration(migrations.Migration):

    dependencies = [
        ('db', '0032_auto_20230520_2015'),
    ]

    operations = [
        migrations.CreateModel(
            name='Inbox',
            fields=[
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
                ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
                ('name', models.CharField(max_length=255)),
                ('description', models.TextField(blank=True, verbose_name='Inbox Description')),
                ('is_default', models.BooleanField(default=False)),
                ('view_props', models.JSONField(default=dict)),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='inbox_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
            ],
            options={
                'verbose_name': 'Inbox',
                'verbose_name_plural': 'Inboxes',
                'db_table': 'inboxes',
                'ordering': ('name',),
            },
        ),
        migrations.AddField(
            model_name='project',
            name='inbox_view',
            field=models.BooleanField(default=False),
        ),
        migrations.CreateModel(
            name='InboxIssue',
            fields=[
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
                ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
                ('status', models.IntegerField(choices=[(-2, 'Pending'), (-1, 'Rejected'), (0, 'Snoozed'), (1, 'Accepted'), (2, 'Duplicate')], default=-2)),
                ('snoozed_till', models.DateTimeField(null=True)),
                ('source', models.TextField(blank=True, null=True)),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='inboxissue_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
                ('duplicate_to', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='inbox_duplicate', to='db.issue')),
                ('inbox', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_inbox', to='db.inbox')),
                ('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_inbox', to='db.issue')),
                ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_inboxissue', to='db.project')),
                ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='inboxissue_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
                ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_inboxissue', to='db.workspace')),
            ],
            options={
                'verbose_name': 'InboxIssue',
                'verbose_name_plural': 'InboxIssues',
                'db_table': 'inbox_issues',
                'ordering': ('-created_at',),
            },
        ),
        migrations.AddField(
            model_name='inbox',
            name='project',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_inbox', to='db.project'),
        ),
        migrations.AddField(
            model_name='inbox',
            name='updated_by',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='inbox_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'),
        ),
        migrations.AddField(
            model_name='inbox',
            name='workspace',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_inbox', to='db.workspace'),
        ),
        migrations.AlterUniqueTogether(
            name='inbox',
            unique_together={('name', 'project')},
        ),
    ]
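Note: the `issue_inbox__status` values filtered in the automation task above map onto these choices; only triaged issues (accepted, rejected, duplicate) or issues that never entered an inbox are eligible for archiving or closing. A small lookup sketch (the names are hypothetical, the values come from the migration above):

    INBOX_STATUS = {-2: "Pending", -1: "Rejected", 0: "Snoozed", 1: "Accepted", 2: "Duplicate"}
    ELIGIBLE = {1, -1, 2}  # plus issue_inbox__isnull=True, per the Q filters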
39  apiserver/plane/db/migrations/0034_auto_20230628_1046.py  Normal file
@@ -0,0 +1,39 @@
# Generated by Django 3.2.19 on 2023-06-28 05:16

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('db', '0033_auto_20230618_2125'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='timelineissue',
            name='created_by',
        ),
        migrations.RemoveField(
            model_name='timelineissue',
            name='issue',
        ),
        migrations.RemoveField(
            model_name='timelineissue',
            name='project',
        ),
        migrations.RemoveField(
            model_name='timelineissue',
            name='updated_by',
        ),
        migrations.RemoveField(
            model_name='timelineissue',
            name='workspace',
        ),
        migrations.DeleteModel(
            name='Shortcut',
        ),
        migrations.DeleteModel(
            name='TimelineIssue',
        ),
    ]
42  apiserver/plane/db/migrations/0035_auto_20230704_2225.py  Normal file
@@ -0,0 +1,42 @@
# Generated by Django 3.2.19 on 2023-07-04 16:55

from django.db import migrations, models


def update_company_organization_size(apps, schema_editor):
    Model = apps.get_model("db", "Workspace")
    updated_size = []
    for obj in Model.objects.all():
        obj.organization_size = str(obj.company_size)
        updated_size.append(obj)

    Model.objects.bulk_update(updated_size, ["organization_size"], batch_size=100)


class Migration(migrations.Migration):
    dependencies = [
        ("db", "0034_auto_20230628_1046"),
    ]

    operations = [
        migrations.AddField(
            model_name="workspace",
            name="organization_size",
            field=models.CharField(default="2-10", max_length=20),
        ),
        migrations.RunPython(update_company_organization_size),
        migrations.AlterField(
            model_name="workspace",
            name="name",
            field=models.CharField(max_length=80, verbose_name="Workspace Name"),
        ),
        migrations.AlterField(
            model_name="workspace",
            name="slug",
            field=models.SlugField(max_length=48, unique=True),
        ),
        migrations.RemoveField(
            model_name="workspace",
            name="company_size",
        ),
    ]
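Note: `RunPython(update_company_organization_size)` is registered with no reverse function, so the migration cannot be unapplied past this point. A hedged sketch of the usual way to keep such a data migration reversible (the noop reverse is standard Django; wiring it in here is an assumption, not part of the diff):

    migrations.RunPython(
        update_company_organization_size,
        migrations.RunPython.noop,  # data copy is safe to skip on rollback
    )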
@@ -0,0 +1,18 @@
# Generated by Django 3.2.19 on 2023-07-05 07:59

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('db', '0035_auto_20230704_2225'),
    ]

    operations = [
        migrations.AlterField(
            model_name='workspace',
            name='organization_size',
            field=models.CharField(max_length=20),
        ),
    ]
@@ -0,0 +1,266 @@
# Generated by Django 4.2.3 on 2023-07-19 06:52

from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import plane.db.models.user
import uuid


def onboarding_default_steps(apps, schema_editor):
    default_onboarding_schema = {
        "workspace_join": True,
        "profile_complete": True,
        "workspace_create": True,
        "workspace_invite": True,
    }

    Model = apps.get_model("db", "User")
    updated_user = []
    for obj in Model.objects.filter(is_onboarded=True):
        obj.onboarding_step = default_onboarding_schema
        obj.is_tour_completed = True
        updated_user.append(obj)

    Model.objects.bulk_update(updated_user, ["onboarding_step", "is_tour_completed"], batch_size=100)


class Migration(migrations.Migration):
    dependencies = [
        ("db", "0036_alter_workspace_organization_size"),
    ]

    operations = [
        migrations.AddField(
            model_name="issue",
            name="archived_at",
            field=models.DateField(null=True),
        ),
        migrations.AddField(
            model_name="project",
            name="archive_in",
            field=models.IntegerField(
                default=0,
                validators=[
                    django.core.validators.MinValueValidator(0),
                    django.core.validators.MaxValueValidator(12),
                ],
            ),
        ),
        migrations.AddField(
            model_name="project",
            name="close_in",
            field=models.IntegerField(
                default=0,
                validators=[
                    django.core.validators.MinValueValidator(0),
                    django.core.validators.MaxValueValidator(12),
                ],
            ),
        ),
        migrations.AddField(
            model_name="project",
            name="default_state",
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="default_state",
                to="db.state",
            ),
        ),
        migrations.AddField(
            model_name="user",
            name="is_tour_completed",
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name="user",
            name="onboarding_step",
            field=models.JSONField(default=plane.db.models.user.get_default_onboarding),
        ),
        migrations.RunPython(onboarding_default_steps),
        migrations.CreateModel(
            name="Notification",
            fields=[
                (
                    "created_at",
                    models.DateTimeField(auto_now_add=True, verbose_name="Created At"),
                ),
                (
                    "updated_at",
                    models.DateTimeField(
                        auto_now=True, verbose_name="Last Modified At"
                    ),
                ),
                (
                    "id",
                    models.UUIDField(
                        db_index=True,
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                        unique=True,
                    ),
                ),
                ("data", models.JSONField(null=True)),
                ("entity_identifier", models.UUIDField(null=True)),
                ("entity_name", models.CharField(max_length=255)),
                ("title", models.TextField()),
                ("message", models.JSONField(null=True)),
                ("message_html", models.TextField(blank=True, default="<p></p>")),
                ("message_stripped", models.TextField(blank=True, null=True)),
                ("sender", models.CharField(max_length=255)),
                ("read_at", models.DateTimeField(null=True)),
                ("snoozed_till", models.DateTimeField(null=True)),
                ("archived_at", models.DateTimeField(null=True)),
                (
                    "created_by",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="%(class)s_created_by",
                        to=settings.AUTH_USER_MODEL,
                        verbose_name="Created By",
                    ),
                ),
                (
                    "project",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="notifications",
                        to="db.project",
                    ),
                ),
                (
                    "receiver",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="received_notifications",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "triggered_by",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="triggered_notifications",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "updated_by",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="%(class)s_updated_by",
                        to=settings.AUTH_USER_MODEL,
                        verbose_name="Last Modified By",
                    ),
                ),
                (
                    "workspace",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="notifications",
                        to="db.workspace",
                    ),
                ),
            ],
            options={
                "verbose_name": "Notification",
                "verbose_name_plural": "Notifications",
                "db_table": "notifications",
                "ordering": ("-created_at",),
            },
        ),
        migrations.CreateModel(
            name="IssueSubscriber",
            fields=[
                (
                    "created_at",
                    models.DateTimeField(auto_now_add=True, verbose_name="Created At"),
                ),
                (
                    "updated_at",
                    models.DateTimeField(
                        auto_now=True, verbose_name="Last Modified At"
                    ),
                ),
                (
                    "id",
                    models.UUIDField(
                        db_index=True,
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                        unique=True,
                    ),
                ),
                (
                    "created_by",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="%(class)s_created_by",
                        to=settings.AUTH_USER_MODEL,
                        verbose_name="Created By",
                    ),
                ),
                (
                    "issue",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="issue_subscribers",
                        to="db.issue",
                    ),
                ),
                (
                    "project",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="project_%(class)s",
                        to="db.project",
                    ),
                ),
                (
                    "subscriber",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="issue_subscribers",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "updated_by",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="%(class)s_updated_by",
                        to=settings.AUTH_USER_MODEL,
                        verbose_name="Last Modified By",
                    ),
                ),
                (
                    "workspace",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="workspace_%(class)s",
                        to="db.workspace",
                    ),
                ),
            ],
            options={
                "verbose_name": "Issue Subscriber",
                "verbose_name_plural": "Issue Subscribers",
                "db_table": "issue_subscribers",
                "ordering": ("-created_at",),
                "unique_together": {("issue", "subscriber")},
            },
        ),
    ]
35  apiserver/plane/db/migrations/0038_auto_20230720_1505.py  Normal file
@@ -0,0 +1,35 @@
# Generated by Django 4.2.3 on 2023-07-20 09:35

from django.db import migrations, models


def restructure_theming(apps, schema_editor):
    Model = apps.get_model("db", "User")
    updated_user = []
    for obj in Model.objects.exclude(theme={}).all():
        current_theme = obj.theme
        updated_theme = {
            "primary": current_theme.get("accent", ""),
            "background": current_theme.get("bgBase", ""),
            "sidebarBackground": current_theme.get("sidebar", ""),
            "text": current_theme.get("textBase", ""),
            "sidebarText": current_theme.get("textBase", ""),
            "palette": f"""{current_theme.get("bgBase","")},{current_theme.get("textBase", "")},{current_theme.get("accent", "")},{current_theme.get("sidebar","")},{current_theme.get("textBase", "")}""",
            "darkPalette": current_theme.get("darkPalette", "")
        }
        obj.theme = updated_theme
        updated_user.append(obj)

    Model.objects.bulk_update(
        updated_user, ["theme"], batch_size=100
    )


class Migration(migrations.Migration):
    dependencies = [
        ("db", "0037_issue_archived_at_project_archive_in_and_more"),
    ]

    operations = [
        migrations.RunPython(restructure_theming)
    ]
97  apiserver/plane/db/migrations/0039_auto_20230723_2203.py  Normal file
@@ -0,0 +1,97 @@
# Generated by Django 4.2.3 on 2023-07-23 16:33
import random
from django.db import migrations, models
import plane.db.models.workspace


def rename_field(apps, schema_editor):
    Model = apps.get_model("db", "IssueActivity")
    updated_activity = []
    for obj in Model.objects.filter(field="assignee"):
        obj.field = "assignees"
        updated_activity.append(obj)

    Model.objects.bulk_update(updated_activity, ["field"], batch_size=100)


def update_workspace_member_props(apps, schema_editor):
    Model = apps.get_model("db", "WorkspaceMember")

    updated_workspace_member = []

    for obj in Model.objects.all():
        if obj.view_props is None:
            obj.view_props = {
                "filters": {"type": None},
                "groupByProperty": None,
                "issueView": "list",
                "orderBy": "-created_at",
                "properties": {
                    "assignee": True,
                    "due_date": True,
                    "key": True,
                    "labels": True,
                    "priority": True,
                    "state": True,
                    "sub_issue_count": True,
                    "attachment_count": True,
                    "link": True,
                    "estimate": True,
                    "created_on": True,
                    "updated_on": True,
                },
                "showEmptyGroups": True,
            }
        else:
            current_view_props = obj.view_props
            obj.view_props = {
                "filters": {"type": None},
                "groupByProperty": None,
                "issueView": "list",
                "orderBy": "-created_at",
                "showEmptyGroups": True,
                "properties": current_view_props,
            }

        updated_workspace_member.append(obj)

    Model.objects.bulk_update(updated_workspace_member, ["view_props"], batch_size=100)


def update_project_member_sort_order(apps, schema_editor):
    Model = apps.get_model("db", "ProjectMember")

    updated_project_members = []

    for obj in Model.objects.all():
        obj.sort_order = random.randint(1, 65536)
        updated_project_members.append(obj)

    Model.objects.bulk_update(updated_project_members, ["sort_order"], batch_size=100)


class Migration(migrations.Migration):
    dependencies = [
        ("db", "0038_auto_20230720_1505"),
    ]

    operations = [
        migrations.RunPython(rename_field),
        migrations.RunPython(update_workspace_member_props),
        migrations.AlterField(
            model_name='workspacemember',
            name='view_props',
            field=models.JSONField(default=plane.db.models.workspace.get_default_props),
        ),
        migrations.AddField(
            model_name='workspacemember',
            name='default_props',
            field=models.JSONField(default=plane.db.models.workspace.get_default_props),
        ),
        migrations.AddField(
            model_name='projectmember',
            name='sort_order',
            field=models.FloatField(default=65535),
        ),
        migrations.RunPython(update_project_member_sort_order),
    ]
@@ -0,0 +1,81 @@
# Generated by Django 4.2.3 on 2023-08-01 06:02

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import plane.db.models.project
import uuid


class Migration(migrations.Migration):

    dependencies = [
        ('db', '0039_auto_20230723_2203'),
    ]

    operations = [
        migrations.AddField(
            model_name='projectmember',
            name='preferences',
            field=models.JSONField(default=plane.db.models.project.get_default_preferences),
        ),
        migrations.AddField(
            model_name='user',
            name='cover_image',
            field=models.URLField(blank=True, max_length=800, null=True),
        ),
        migrations.CreateModel(
            name='IssueReaction',
            fields=[
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
                ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
                ('reaction', models.CharField(max_length=20)),
                ('actor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_reactions', to=settings.AUTH_USER_MODEL)),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
                ('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_reactions', to='db.issue')),
                ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project')),
                ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
                ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace')),
            ],
            options={
                'verbose_name': 'Issue Reaction',
                'verbose_name_plural': 'Issue Reactions',
                'db_table': 'issue_reactions',
                'ordering': ('-created_at',),
                'unique_together': {('issue', 'actor', 'reaction')},
            },
        ),
        migrations.CreateModel(
            name='CommentReaction',
            fields=[
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
                ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
                ('reaction', models.CharField(max_length=20)),
                ('actor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comment_reactions', to=settings.AUTH_USER_MODEL)),
                ('comment', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comment_reactions', to='db.issuecomment')),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
                ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project')),
                ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
                ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace')),
            ],
            options={
                'verbose_name': 'Comment Reaction',
                'verbose_name_plural': 'Comment Reactions',
                'db_table': 'comment_reactions',
                'ordering': ('-created_at',),
                'unique_together': {('comment', 'actor', 'reaction')},
            },
        ),
        migrations.AlterField(
            model_name='project',
            name='identifier',
            field=models.CharField(max_length=12, verbose_name='Project Identifier'),
        ),
        migrations.AlterField(
            model_name='projectidentifier',
            name='name',
            field=models.CharField(max_length=12),
        ),
    ]
@@ -0,0 +1,243 @@
# Generated by Django 4.2.3 on 2023-08-14 07:12

from django.conf import settings
import django.contrib.postgres.fields
from django.db import migrations, models
import django.db.models.deletion
import plane.db.models.exporter
import plane.db.models.project
import uuid
import random
import string


def generate_display_name(apps, schema_editor):
    UserModel = apps.get_model("db", "User")
    updated_users = []
    for obj in UserModel.objects.all():
        obj.display_name = (
            obj.email.split("@")[0]
            if len(obj.email.split("@"))
            else "".join(random.choice(string.ascii_letters) for _ in range(6))
        )
        updated_users.append(obj)
    UserModel.objects.bulk_update(updated_users, ["display_name"], batch_size=100)


def rectify_field_issue_activity(apps, schema_editor):
    Model = apps.get_model("db", "IssueActivity")
    updated_activity = []
    for obj in Model.objects.filter(field="assignee"):
        obj.field = "assignees"
        updated_activity.append(obj)

    Model.objects.bulk_update(updated_activity, ["field"], batch_size=100)


def update_assignee_issue_activity(apps, schema_editor):
    Model = apps.get_model("db", "IssueActivity")
    updated_activity = []

    # Get all the users
    User = apps.get_model("db", "User")
    users = User.objects.values("id", "email", "display_name")

    for obj in Model.objects.filter(field="assignees"):
        if bool(obj.new_value) and not bool(obj.old_value):
            # Get user from list
            assigned_user = [
                user for user in users if user.get("email") == obj.new_value
            ]
            if assigned_user:
                obj.new_value = assigned_user[0].get("display_name")
                obj.new_identifier = assigned_user[0].get("id")
                # Update the comment
                words = obj.comment.split()
                words[-1] = assigned_user[0].get("display_name")
                obj.comment = " ".join(words)

        if bool(obj.old_value) and not bool(obj.new_value):
            # Get user from list
            assigned_user = [
                user for user in users if user.get("email") == obj.old_value
            ]
            if assigned_user:
                obj.old_value = assigned_user[0].get("display_name")
                obj.old_identifier = assigned_user[0].get("id")
                # Update the comment
                words = obj.comment.split()
                words[-1] = assigned_user[0].get("display_name")
                obj.comment = " ".join(words)

        updated_activity.append(obj)

    Model.objects.bulk_update(
        updated_activity,
        ["old_value", "new_value", "old_identifier", "new_identifier", "comment"],
        batch_size=200,
    )


def update_name_activity(apps, schema_editor):
    Model = apps.get_model("db", "IssueActivity")
    update_activity = []
    for obj in Model.objects.filter(field="name"):
        obj.comment = obj.comment.replace("start date", "name")
        update_activity.append(obj)

    Model.objects.bulk_update(update_activity, ["comment"], batch_size=1000)


def random_cycle_order(apps, schema_editor):
    CycleModel = apps.get_model("db", "Cycle")
    updated_cycles = []
    for obj in CycleModel.objects.all():
        obj.sort_order = random.randint(1, 65536)
        updated_cycles.append(obj)
    CycleModel.objects.bulk_update(updated_cycles, ["sort_order"], batch_size=100)


def random_module_order(apps, schema_editor):
    ModuleModel = apps.get_model("db", "Module")
    updated_modules = []
    for obj in ModuleModel.objects.all():
        obj.sort_order = random.randint(1, 65536)
        updated_modules.append(obj)
    ModuleModel.objects.bulk_update(updated_modules, ["sort_order"], batch_size=100)


def update_user_issue_properties(apps, schema_editor):
    IssuePropertyModel = apps.get_model("db", "IssueProperty")
    updated_issue_properties = []
    for obj in IssuePropertyModel.objects.all():
        obj.properties["start_date"] = True
        updated_issue_properties.append(obj)
    IssuePropertyModel.objects.bulk_update(
        updated_issue_properties, ["properties"], batch_size=100
    )


def workspace_member_properties(apps, schema_editor):
    WorkspaceMemberModel = apps.get_model("db", "WorkspaceMember")
    updated_workspace_members = []
    for obj in WorkspaceMemberModel.objects.all():
        obj.view_props["properties"]["start_date"] = True
        obj.default_props["properties"]["start_date"] = True
        updated_workspace_members.append(obj)

    WorkspaceMemberModel.objects.bulk_update(
        updated_workspace_members, ["view_props", "default_props"], batch_size=100
    )


class Migration(migrations.Migration):

    dependencies = [
        ('db', '0040_projectmember_preferences_user_cover_image_and_more'),
    ]

    operations = [
        migrations.AddField(
            model_name='cycle',
            name='sort_order',
            field=models.FloatField(default=65535),
        ),
        migrations.AddField(
            model_name='issuecomment',
            name='access',
            field=models.CharField(choices=[('INTERNAL', 'INTERNAL'), ('EXTERNAL', 'EXTERNAL')], default='INTERNAL', max_length=100),
        ),
        migrations.AddField(
            model_name='module',
            name='sort_order',
            field=models.FloatField(default=65535),
        ),
        migrations.AddField(
            model_name='user',
            name='display_name',
            field=models.CharField(default='', max_length=255),
        ),
        migrations.CreateModel(
            name='ExporterHistory',
            fields=[
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
                ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
                ('project', django.contrib.postgres.fields.ArrayField(base_field=models.UUIDField(default=uuid.uuid4), blank=True, null=True, size=None)),
                ('provider', models.CharField(choices=[('json', 'json'), ('csv', 'csv'), ('xlsx', 'xlsx')], max_length=50)),
                ('status', models.CharField(choices=[('queued', 'Queued'), ('processing', 'Processing'), ('completed', 'Completed'), ('failed', 'Failed')], default='queued', max_length=50)),
                ('reason', models.TextField(blank=True)),
                ('key', models.TextField(blank=True)),
                ('url', models.URLField(blank=True, max_length=800, null=True)),
                ('token', models.CharField(default=plane.db.models.exporter.generate_token, max_length=255, unique=True)),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
                ('initiated_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_exporters', to=settings.AUTH_USER_MODEL)),
                ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
                ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_exporters', to='db.workspace')),
            ],
            options={
                'verbose_name': 'Exporter',
                'verbose_name_plural': 'Exporters',
                'db_table': 'exporters',
                'ordering': ('-created_at',),
            },
        ),
        migrations.CreateModel(
            name='ProjectDeployBoard',
            fields=[
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
                ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
                ('anchor', models.CharField(db_index=True, default=plane.db.models.project.get_anchor, max_length=255, unique=True)),
                ('comments', models.BooleanField(default=False)),
                ('reactions', models.BooleanField(default=False)),
                ('votes', models.BooleanField(default=False)),
                ('views', models.JSONField(default=plane.db.models.project.get_default_views)),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
                ('inbox', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='bord_inbox', to='db.inbox')),
                ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project')),
                ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
                ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace')),
            ],
            options={
                'verbose_name': 'Project Deploy Board',
                'verbose_name_plural': 'Project Deploy Boards',
                'db_table': 'project_deploy_boards',
                'ordering': ('-created_at',),
                'unique_together': {('project', 'anchor')},
            },
        ),
        migrations.CreateModel(
            name='IssueVote',
            fields=[
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
                ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
                ('vote', models.IntegerField(choices=[(-1, 'DOWNVOTE'), (1, 'UPVOTE')])),
                ('actor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='votes', to=settings.AUTH_USER_MODEL)),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
                ('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='votes', to='db.issue')),
                ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project')),
                ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
                ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace')),
            ],
            options={
                'verbose_name': 'Issue Vote',
                'verbose_name_plural': 'Issue Votes',
                'db_table': 'issue_votes',
                'ordering': ('-created_at',),
                'unique_together': {('issue', 'actor')},
            },
        ),
        migrations.AlterField(
            model_name='modulelink',
            name='title',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.RunPython(generate_display_name),
        migrations.RunPython(rectify_field_issue_activity),
        migrations.RunPython(update_assignee_issue_activity),
        migrations.RunPython(update_name_activity),
        migrations.RunPython(random_cycle_order),
        migrations.RunPython(random_module_order),
        migrations.RunPython(update_user_issue_properties),
        migrations.RunPython(workspace_member_properties),
    ]
@@ -18,12 +18,12 @@ from .project import (
    ProjectMemberInvite,
    ProjectIdentifier,
    ProjectFavorite,
    ProjectDeployBoard,
)

from .issue import (
    Issue,
    IssueActivity,
    TimelineIssue,
    IssueProperty,
    IssueComment,
    IssueBlocker,
@@ -34,6 +34,10 @@ from .issue import (
    IssueLink,
    IssueSequence,
    IssueAttachment,
    IssueSubscriber,
    IssueReaction,
    CommentReaction,
    IssueVote,
)

from .asset import FileAsset
@@ -44,8 +48,6 @@ from .state import State

from .cycle import Cycle, CycleIssue, CycleFavorite

from .shortcut import Shortcut

from .view import IssueView, IssueViewFavorite

from .module import Module, ModuleMember, ModuleIssue, ModuleLink, ModuleFavorite
@@ -59,6 +61,7 @@ from .integration import (
    GithubRepositorySync,
    GithubIssueSync,
    GithubCommentSync,
    SlackProjectSync,
)

from .importer import Importer
@@ -66,3 +69,11 @@ from .importer import Importer
from .page import Page, PageBlock, PageFavorite, PageLabel

from .estimate import Estimate, EstimatePoint

from .inbox import Inbox, InboxIssue

from .analytic import AnalyticView

from .notification import Notification

from .exporter import ExporterHistory
25  apiserver/plane/db/models/analytic.py  Normal file
@@ -0,0 +1,25 @@
|
||||
# Django models
|
||||
from django.db import models
|
||||
from django.conf import settings
|
||||
|
||||
from .base import BaseModel
|
||||
|
||||
|
||||
class AnalyticView(BaseModel):
|
||||
workspace = models.ForeignKey(
|
||||
"db.Workspace", related_name="analytics", on_delete=models.CASCADE
|
||||
)
|
||||
name = models.CharField(max_length=255)
|
||||
description = models.TextField(blank=True)
|
||||
query = models.JSONField()
|
||||
query_dict = models.JSONField(default=dict)
|
||||
|
||||
class Meta:
|
||||
verbose_name = "Analytic"
|
||||
verbose_name_plural = "Analytics"
|
||||
db_table = "analytic_views"
|
||||
ordering = ("-created_at",)
|
||||
|
||||
def __str__(self):
|
||||
"""Return name of the analytic view"""
|
||||
return f"{self.name} <{self.workspace.name}>"
|
||||
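AnalyticView stores a saved analytics query per workspace, keeping the raw query alongside a parsed query_dict. A minimal usage sketch, assuming a workspace already exists; the query keys are illustrative, not taken from this diff:

    from plane.db.models import AnalyticView, Workspace

    workspace = Workspace.objects.first()
    view = AnalyticView.objects.create(
        workspace=workspace,
        name="Issues by priority",
        query={"x_axis": "priority", "y_axis": "issue_count"},       # as submitted
        query_dict={"x_axis": "priority", "y_axis": "issue_count"},  # parsed form
    )
    # ordering = ("-created_at",) means the newest saved view comes first.
    latest = AnalyticView.objects.filter(workspace=workspace).first()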
@@ -4,6 +4,7 @@ from uuid import uuid4
 # Django import
 from django.db import models
 from django.core.exceptions import ValidationError
+from django.conf import settings

 # Module import
 from . import BaseModel
@@ -16,8 +17,7 @@ def get_upload_path(instance, filename):


 def file_size(value):
-    limit = 5 * 1024 * 1024
-    if value.size > limit:
+    if value.size > settings.FILE_SIZE_LIMIT:
         raise ValidationError("File too large. Size should not exceed 5 MB.")


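The hard-coded 5 MB ceiling becomes a deployment setting. Note that the error message still says "5 MB" even though settings.FILE_SIZE_LIMIT can now be any value, so instances that raise the limit may want to interpolate it into the message. A self-contained sketch of the pattern; the Attachment model and the message formatting are illustrative, not part of this diff:

    from django.conf import settings
    from django.core.exceptions import ValidationError
    from django.db import models

    def file_size(value):
        # settings.FILE_SIZE_LIMIT is assumed to be in bytes, e.g. 5 * 1024 * 1024.
        if value.size > settings.FILE_SIZE_LIMIT:
            limit_mb = settings.FILE_SIZE_LIMIT // (1024 * 1024)
            raise ValidationError(f"File too large. Size should not exceed {limit_mb} MB.")

    class Attachment(models.Model):  # illustrative model, not from this diff
        asset = models.FileField(upload_to="uploads/", validators=[file_size])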
@@ -16,6 +16,8 @@ class Cycle(ProjectBaseModel):
         on_delete=models.CASCADE,
         related_name="owned_by_cycle",
     )
+    view_props = models.JSONField(default=dict)
+    sort_order = models.FloatField(default=65535)

     class Meta:
         verbose_name = "Cycle"
@@ -23,6 +25,17 @@ class Cycle(ProjectBaseModel):
         db_table = "cycles"
         ordering = ("-created_at",)

+    def save(self, *args, **kwargs):
+        if self._state.adding:
+            smallest_sort_order = Cycle.objects.filter(
+                project=self.project
+            ).aggregate(smallest=models.Min("sort_order"))["smallest"]
+
+            if smallest_sort_order is not None:
+                self.sort_order = smallest_sort_order - 10000
+
+        super(Cycle, self).save(*args, **kwargs)
+
     def __str__(self):
         """Return name of the cycle"""
         return f"{self.name} <{self.project.name}>"

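A freshly created cycle takes the project's smallest sort_order minus 10000, or the 65535 field default when it is the first one, so new cycles land at the top while leaving numeric gaps for drag-and-drop reordering by midpoint. A pure-Python illustration of the values this produces, assuming the save() logic above:

    def next_sort_order(existing):
        # First record keeps the field default; later ones go 10000 below the minimum.
        return 65535 if not existing else min(existing) - 10000

    orders = []
    for _ in range(3):
        orders.append(next_sort_order(orders))
    print(orders)  # [65535, 55535, 45535]
    # Dropping an item between two neighbours can take the midpoint,
    # e.g. (55535 + 45535) / 2 == 50535, without renumbering anything else.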
@@ -39,7 +39,6 @@ class EstimatePoint(ProjectBaseModel):
         return f"{self.estimate.name} <{self.key}> <{self.value}>"

     class Meta:
         unique_together = ["value", "estimate"]
         verbose_name = "Estimate Point"
         verbose_name_plural = "Estimate Points"
         db_table = "estimate_points"

56  apiserver/plane/db/models/exporter.py  Normal file
@@ -0,0 +1,56 @@
+import uuid
+
+# Python imports
+from uuid import uuid4
+
+# Django imports
+from django.db import models
+from django.conf import settings
+from django.contrib.postgres.fields import ArrayField
+
+# Module imports
+from . import BaseModel
+
+def generate_token():
+    return uuid4().hex
+
+class ExporterHistory(BaseModel):
+    workspace = models.ForeignKey(
+        "db.WorkSpace", on_delete=models.CASCADE, related_name="workspace_exporters"
+    )
+    project = ArrayField(models.UUIDField(default=uuid.uuid4), blank=True, null=True)
+    provider = models.CharField(
+        max_length=50,
+        choices=(
+            ("json", "json"),
+            ("csv", "csv"),
+            ("xlsx", "xlsx"),
+        ),
+    )
+    status = models.CharField(
+        max_length=50,
+        choices=(
+            ("queued", "Queued"),
+            ("processing", "Processing"),
+            ("completed", "Completed"),
+            ("failed", "Failed"),
+        ),
+        default="queued",
+    )
+    reason = models.TextField(blank=True)
+    key = models.TextField(blank=True)
+    url = models.URLField(max_length=800, blank=True, null=True)
+    token = models.CharField(max_length=255, default=generate_token, unique=True)
+    initiated_by = models.ForeignKey(
+        settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name="workspace_exporters"
+    )
+
+    class Meta:
+        verbose_name = "Exporter"
+        verbose_name_plural = "Exporters"
+        db_table = "exporters"
+        ordering = ("-created_at",)
+
+    def __str__(self):
+        """Return name of the service"""
+        return f"{self.provider} <{self.workspace.name}>"
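ExporterHistory tracks one export job per row: it starts queued with a unique token from generate_token(), and a worker later records the outcome. A hypothetical lifecycle, assuming background task code that is not part of this diff:

    from django.contrib.auth import get_user_model
    from plane.db.models import ExporterHistory, Workspace

    User = get_user_model()
    job = ExporterHistory.objects.create(
        workspace=Workspace.objects.first(),
        project=[],            # assumption: an empty list means all projects
        provider="csv",
        initiated_by=User.objects.first(),
    )

    # A worker would later flip the status and attach the artifact:
    job.status = "completed"
    job.url = "https://example.com/exports/issues.csv"  # placeholder URL
    job.save(update_fields=["status", "url"])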
@@ -33,6 +33,7 @@ class Importer(ProjectBaseModel):
     token = models.ForeignKey(
         "db.APIToken", on_delete=models.CASCADE, related_name="importer"
     )
+    imported_data = models.JSONField(null=True)

     class Meta:
         verbose_name = "Importer"

51  apiserver/plane/db/models/inbox.py  Normal file
@@ -0,0 +1,51 @@
+# Django imports
+from django.db import models
+
+# Module imports
+from plane.db.models import ProjectBaseModel
+
+
+class Inbox(ProjectBaseModel):
+    name = models.CharField(max_length=255)
+    description = models.TextField(verbose_name="Inbox Description", blank=True)
+    is_default = models.BooleanField(default=False)
+    view_props = models.JSONField(default=dict)
+
+    def __str__(self):
+        """Return name of the Inbox"""
+        return f"{self.name} <{self.project.name}>"
+
+    class Meta:
+        unique_together = ["name", "project"]
+        verbose_name = "Inbox"
+        verbose_name_plural = "Inboxes"
+        db_table = "inboxes"
+        ordering = ("name",)
+
+
+class InboxIssue(ProjectBaseModel):
+    inbox = models.ForeignKey(
+        "db.Inbox", related_name="issue_inbox", on_delete=models.CASCADE
+    )
+    issue = models.ForeignKey(
+        "db.Issue", related_name="issue_inbox", on_delete=models.CASCADE
+    )
+    status = models.IntegerField(
+        choices=((-2, "Pending"), (-1, "Rejected"), (0, "Snoozed"), (1, "Accepted"), (2, "Duplicate")),
+        default=-2,
+    )
+    snoozed_till = models.DateTimeField(null=True)
+    duplicate_to = models.ForeignKey(
+        "db.Issue", related_name="inbox_duplicate", on_delete=models.SET_NULL, null=True
+    )
+    source = models.TextField(blank=True, null=True)
+
+    class Meta:
+        verbose_name = "InboxIssue"
+        verbose_name_plural = "InboxIssues"
+        db_table = "inbox_issues"
+        ordering = ("-created_at",)
+
+    def __str__(self):
+        """Return name of the Issue"""
+        return f"{self.issue.name} <{self.inbox.name}>"
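InboxIssue's integer status drives visibility: the IssueManager added later in this diff only surfaces issues whose inbox record is Rejected (-1), Accepted (1), or Duplicate (2), or that never went through an inbox. A sketch of the two common triage transitions; this usage is assumed, not code from this diff:

    from datetime import timedelta
    from django.utils import timezone
    from plane.db.models import InboxIssue

    record = InboxIssue.objects.filter(status=-2).first()  # a pending intake

    # Accepting makes the underlying issue visible to Issue.issue_objects.
    record.status = 1
    record.save(update_fields=["status"])

    # Snoozing keeps it hidden until someone revisits it.
    record.status = 0
    record.snoozed_till = timezone.now() + timedelta(days=7)
    record.save(update_fields=["status", "snoozed_till"])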
@@ -1,2 +1,3 @@
 from .base import Integration, WorkspaceIntegration
 from .github import GithubRepository, GithubRepositorySync, GithubIssueSync, GithubCommentSync
+from .slack import SlackProjectSync
32  apiserver/plane/db/models/integration/slack.py  Normal file
@@ -0,0 +1,32 @@
+# Python imports
+import uuid
+
+# Django imports
+from django.db import models
+
+# Module imports
+from plane.db.models import ProjectBaseModel
+
+
+class SlackProjectSync(ProjectBaseModel):
+    access_token = models.CharField(max_length=300)
+    scopes = models.TextField()
+    bot_user_id = models.CharField(max_length=50)
+    webhook_url = models.URLField(max_length=1000)
+    data = models.JSONField(default=dict)
+    team_id = models.CharField(max_length=30)
+    team_name = models.CharField(max_length=300)
+    workspace_integration = models.ForeignKey(
+        "db.WorkspaceIntegration", related_name="slack_syncs", on_delete=models.CASCADE
+    )
+
+    def __str__(self):
+        """Return the repo name"""
+        return f"{self.project.name}"
+
+    class Meta:
+        unique_together = ["team_id", "project"]
+        verbose_name = "Slack Project Sync"
+        verbose_name_plural = "Slack Project Syncs"
+        db_table = "slack_project_syncs"
+        ordering = ("-created_at",)
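A SlackProjectSync row would typically be written once per OAuth handshake; unique_together = ["team_id", "project"] stops the same Slack team from being linked to a project twice. A hypothetical creation call, with the payload shape assumed from Slack's oauth.v2.access response; project and workspace_integration are assumed pre-existing instances:

    from plane.db.models import SlackProjectSync

    oauth_response = {  # assumed response shape
        "access_token": "xoxb-...",
        "scope": "chat:write,incoming-webhook",
        "bot_user_id": "U0XXXXXXX",
        "incoming_webhook": {"url": "https://hooks.slack.com/services/..."},
        "team": {"id": "T0XXXXXXX", "name": "Acme"},
    }
    SlackProjectSync.objects.create(
        project=project,
        workspace_integration=workspace_integration,
        access_token=oauth_response["access_token"],
        scopes=oauth_response["scope"],
        bot_user_id=oauth_response["bot_user_id"],
        webhook_url=oauth_response["incoming_webhook"]["url"],
        team_id=oauth_response["team"]["id"],
        team_name=oauth_response["team"]["name"],
        data=oauth_response,
    )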
@@ -17,6 +17,21 @@ from plane.utils.html_processor import strip_tags


 # TODO: Handle identifiers for Bulk Inserts - nk
+class IssueManager(models.Manager):
+    def get_queryset(self):
+        return (
+            super()
+            .get_queryset()
+            .filter(
+                models.Q(issue_inbox__status=1)
+                | models.Q(issue_inbox__status=-1)
+                | models.Q(issue_inbox__status=2)
+                | models.Q(issue_inbox__isnull=True)
+            )
+            .exclude(archived_at__isnull=False)
+        )
+
+
 class Issue(ProjectBaseModel):
     PRIORITY_CHOICES = (
         ("urgent", "Urgent"),
@@ -67,6 +82,10 @@ class Issue(ProjectBaseModel):
     )
     sort_order = models.FloatField(default=65535)
     completed_at = models.DateTimeField(null=True)
+    archived_at = models.DateField(null=True)
+
+    objects = models.Manager()
+    issue_objects = IssueManager()

     class Meta:
         verbose_name = "Issue"
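With two managers, Issue.objects still returns every row while Issue.issue_objects hides archived issues and inbox items that are Pending or Snoozed, so list and board endpoints can switch managers instead of repeating the filter. A small sketch of the difference; the data is assumed:

    from plane.db.models import Issue

    total = Issue.objects.count()          # includes archived and pending-inbox rows
    visible = Issue.issue_objects.count()  # accepted/rejected/duplicate inbox
                                           # status or no inbox record, not archived
    assert visible <= total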
@@ -81,11 +100,14 @@ class Issue(ProjectBaseModel):
                 from plane.db.models import State

                 default_state = State.objects.filter(
-                    project=self.project, default=True
+                    ~models.Q(name="Triage"), project=self.project, default=True
                 ).first()
                 # if there is no default state assign any random state
                 if default_state is None:
-                    self.state = State.objects.filter(project=self.project).first()
+                    random_state = State.objects.filter(
+                        ~models.Q(name="Triage"), project=self.project
+                    ).first()
+                    self.state = random_state
                 else:
                     self.state = default_state
             except ImportError:
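The positional ~models.Q(name="Triage") negates the lookup inside filter(), so the hidden Triage state can never be picked as a new issue's initial state; note that positional Q objects must come before keyword arguments. The same predicate written two equivalent ways, where project is an assumed Project instance:

    from django.db.models import Q
    from plane.db.models import State

    a = State.objects.filter(~Q(name="Triage"), project=project)
    b = State.objects.filter(project=project).exclude(name="Triage")
    # Both compile to: project_id = %s AND NOT (name = 'Triage')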
@@ -94,18 +116,13 @@ class Issue(ProjectBaseModel):
             try:
                 from plane.db.models import State, PageBlock

-                # Get the completed states of the project
-                completed_states = State.objects.filter(
-                    group="completed", project=self.project
-                ).values_list("pk", flat=True)
-                # Check if the current issue state and completed state id are same
-                if self.state.id in completed_states:
+                if self.state.group == "completed":
                     self.completed_at = timezone.now()
                     # check if there are any page blocks
                     PageBlock.objects.filter(issue_id=self.id).filter().update(
                         completed_at=timezone.now()
                     )

                 else:
                     PageBlock.objects.filter(issue_id=self.id).filter().update(
                         completed_at=None
@@ -116,7 +133,6 @@ class Issue(ProjectBaseModel):
                 pass
         if self._state.adding:
             # Get the maximum display_id value from the database
-
             last_id = IssueSequence.objects.filter(project=self.project).aggregate(
                 largest=models.Max("sequence")
             )["largest"]
@@ -184,7 +200,7 @@ class IssueAssignee(ProjectBaseModel):


 class IssueLink(ProjectBaseModel):
-    title = models.CharField(max_length=255, null=True)
+    title = models.CharField(max_length=255, null=True, blank=True)
     url = models.URLField()
     issue = models.ForeignKey(
         "db.Issue", on_delete=models.CASCADE, related_name="issue_link"
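For a CharField, null=True alone only affects the database column; Django forms and DRF serializers still treat the field as required until blank=True marks it optional at the validation layer. The AlterField on modulelink.title near the top of this section mirrors the same change. A quick illustration; the full_clean call and excluded fields are illustrative:

    from plane.db.models import IssueLink

    link = IssueLink(url="https://example.com")  # no title supplied
    link.full_clean(exclude=["project", "workspace", "issue", "created_by", "updated_by"])
    # Passes only because title is blank=True; with null=True alone,
    # full_clean() raises ValidationError("This field cannot be blank.").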
@@ -206,8 +222,8 @@ def get_upload_path(instance, filename):


 def file_size(value):
-    limit = 5 * 1024 * 1024
-    if value.size > limit:
+    # File limit check is only for cloud hosted
+    if value.size > settings.FILE_SIZE_LIMIT:
         raise ValidationError("File too large. Size should not exceed 5 MB.")


@@ -272,24 +288,6 @@ class IssueActivity(ProjectBaseModel):
         return str(self.issue)


-class TimelineIssue(ProjectBaseModel):
-    issue = models.ForeignKey(
-        Issue, on_delete=models.CASCADE, related_name="issue_timeline"
-    )
-    sequence_id = models.FloatField(default=1.0)
-    links = models.JSONField(default=dict, blank=True)
-
-    class Meta:
-        verbose_name = "Timeline Issue"
-        verbose_name_plural = "Timeline Issues"
-        db_table = "issue_timelines"
-        ordering = ("-created_at",)
-
-    def __str__(self):
-        """Return project of the project member"""
-        return str(self.issue)
-
-
 class IssueComment(ProjectBaseModel):
     comment_stripped = models.TextField(verbose_name="Comment", blank=True)
     comment_json = models.JSONField(blank=True, default=dict)
@@ -303,6 +301,14 @@ class IssueComment(ProjectBaseModel):
         related_name="comments",
         null=True,
     )
+    access = models.CharField(
+        choices=(
+            ("INTERNAL", "INTERNAL"),
+            ("EXTERNAL", "EXTERNAL"),
+        ),
+        default="INTERNAL",
+        max_length=100,
+    )

     def save(self, *args, **kwargs):
         self.comment_stripped = (
@@ -396,6 +402,93 @@ class IssueSequence(ProjectBaseModel):
         ordering = ("-created_at",)


+class IssueSubscriber(ProjectBaseModel):
+    issue = models.ForeignKey(
+        Issue, on_delete=models.CASCADE, related_name="issue_subscribers"
+    )
+    subscriber = models.ForeignKey(
+        settings.AUTH_USER_MODEL,
+        on_delete=models.CASCADE,
+        related_name="issue_subscribers",
+    )
+
+    class Meta:
+        unique_together = ["issue", "subscriber"]
+        verbose_name = "Issue Subscriber"
+        verbose_name_plural = "Issue Subscribers"
+        db_table = "issue_subscribers"
+        ordering = ("-created_at",)
+
+    def __str__(self):
+        return f"{self.issue.name} {self.subscriber.email}"
+
+
+class IssueReaction(ProjectBaseModel):
+    actor = models.ForeignKey(
+        settings.AUTH_USER_MODEL,
+        on_delete=models.CASCADE,
+        related_name="issue_reactions",
+    )
+    issue = models.ForeignKey(
+        Issue, on_delete=models.CASCADE, related_name="issue_reactions"
+    )
+    reaction = models.CharField(max_length=20)
+
+    class Meta:
+        unique_together = ["issue", "actor", "reaction"]
+        verbose_name = "Issue Reaction"
+        verbose_name_plural = "Issue Reactions"
+        db_table = "issue_reactions"
+        ordering = ("-created_at",)
+
+    def __str__(self):
+        return f"{self.issue.name} {self.actor.email}"
+
+
+class CommentReaction(ProjectBaseModel):
+    actor = models.ForeignKey(
+        settings.AUTH_USER_MODEL,
+        on_delete=models.CASCADE,
+        related_name="comment_reactions",
+    )
+    comment = models.ForeignKey(
+        IssueComment, on_delete=models.CASCADE, related_name="comment_reactions"
+    )
+    reaction = models.CharField(max_length=20)
+
+    class Meta:
+        unique_together = ["comment", "actor", "reaction"]
+        verbose_name = "Comment Reaction"
+        verbose_name_plural = "Comment Reactions"
+        db_table = "comment_reactions"
+        ordering = ("-created_at",)
+
+    def __str__(self):
+        return f"{self.issue.name} {self.actor.email}"
+
+
+class IssueVote(ProjectBaseModel):
+    issue = models.ForeignKey(Issue, on_delete=models.CASCADE, related_name="votes")
+    actor = models.ForeignKey(
+        settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name="votes"
+    )
+    vote = models.IntegerField(
+        choices=(
+            (-1, "DOWNVOTE"),
+            (1, "UPVOTE"),
+        )
+    )
+    class Meta:
+        unique_together = ["issue", "actor"]
+        verbose_name = "Issue Vote"
+        verbose_name_plural = "Issue Votes"
+        db_table = "issue_votes"
+        ordering = ("-created_at",)
+
+    def __str__(self):
+        return f"{self.issue.name} {self.actor.email}"
+
+
 # TODO: Find a better method to save the model
 @receiver(post_save, sender=Issue)
 def create_issue_sequence(sender, instance, created, **kwargs):
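IssueVote stores one signed value per (issue, actor) pair, so an issue's score is a plain Sum over the votes related name; something like the following could rank issues on a public board. The aggregation itself is an assumption, not code from this diff:

    from django.db.models import Count, Sum
    from plane.db.models import Issue

    ranked = Issue.issue_objects.annotate(
        score=Sum("votes__vote"),   # +1 per upvote, -1 per downvote
        voters=Count("votes"),
    ).order_by("-score")

    for issue in ranked[:5]:
        print(issue.name, issue.score or 0, issue.voters)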
@@ -39,6 +39,8 @@ class Module(ProjectBaseModel):
         through="ModuleMember",
         through_fields=("module", "member"),
     )
+    view_props = models.JSONField(default=dict)
+    sort_order = models.FloatField(default=65535)

     class Meta:
         unique_together = ["name", "project"]
@@ -47,6 +49,17 @@ class Module(ProjectBaseModel):
         db_table = "modules"
         ordering = ("-created_at",)

+    def save(self, *args, **kwargs):
+        if self._state.adding:
+            smallest_sort_order = Module.objects.filter(
+                project=self.project
+            ).aggregate(smallest=models.Min("sort_order"))["smallest"]
+
+            if smallest_sort_order is not None:
+                self.sort_order = smallest_sort_order - 10000
+
+        super(Module, self).save(*args, **kwargs)
+
     def __str__(self):
         return f"{self.name} {self.start_date} {self.target_date}"

@@ -85,7 +98,7 @@ class ModuleIssue(ProjectBaseModel):


 class ModuleLink(ProjectBaseModel):
-    title = models.CharField(max_length=255, null=True)
+    title = models.CharField(max_length=255, blank=True, null=True)
     url = models.URLField()
     module = models.ForeignKey(
         Module, on_delete=models.CASCADE, related_name="link_module"
Some files were not shown because too many files have changed in this diff.