Mirror of https://github.com/makeplane/plane
Synced 2025-08-07 19:59:33 +00:00

Compare commits: 952 commits, feat/notif ... dev/settin
.deepsource.toml (new file, 17 lines)
@@ -0,0 +1,17 @@
+version = 1
+
+[[analyzers]]
+name = "shell"
+
+[[analyzers]]
+name = "javascript"
+
+[analyzers.meta]
+plugins = ["react"]
+environment = ["nodejs"]
+
+[[analyzers]]
+name = "python"
+
+[analyzers.meta]
+runtime_version = "3.x.x"
.env.example (58 lines changed)
@@ -1,32 +1,3 @@
-# Frontend
-# Extra image domains that need to be added for Next Image
-NEXT_PUBLIC_EXTRA_IMAGE_DOMAINS=
-# Google Client ID for Google OAuth
-NEXT_PUBLIC_GOOGLE_CLIENTID=""
-# Github ID for Github OAuth
-NEXT_PUBLIC_GITHUB_ID=""
-# Github App Name for GitHub Integration
-NEXT_PUBLIC_GITHUB_APP_NAME=""
-# Sentry DSN for error monitoring
-NEXT_PUBLIC_SENTRY_DSN=""
-# Enable/Disable OAUTH - default 0 for selfhosted instance
-NEXT_PUBLIC_ENABLE_OAUTH=0
-# Enable/Disable sentry
-NEXT_PUBLIC_ENABLE_SENTRY=0
-# Enable/Disable session recording
-NEXT_PUBLIC_ENABLE_SESSION_RECORDER=0
-# Enable/Disable event tracking
-NEXT_PUBLIC_TRACK_EVENTS=0
-# Slack for Slack Integration
-NEXT_PUBLIC_SLACK_CLIENT_ID=""
-
-# Backend
-# Debug value for api server use it as 0 for production use
-DEBUG=0
-
-# Error logs
-SENTRY_DSN=""
-
 # Database Settings
 PGUSER="plane"
 PGPASSWORD="plane"
@@ -39,15 +10,6 @@ REDIS_HOST="plane-redis"
 REDIS_PORT="6379"
 REDIS_URL="redis://${REDIS_HOST}:6379/"
-
-# Email Settings
-EMAIL_HOST=""
-EMAIL_HOST_USER=""
-EMAIL_HOST_PASSWORD=""
-EMAIL_PORT=587
-EMAIL_FROM="Team Plane <team@mailer.plane.so>"
-EMAIL_USE_TLS="1"
-EMAIL_USE_SSL="0"
 
 # AWS Settings
 AWS_REGION=""
 AWS_ACCESS_KEY_ID="access-key"
@@ -59,25 +21,15 @@ AWS_S3_BUCKET_NAME="uploads"
 FILE_SIZE_LIMIT=5242880
 
 # GPT settings
-OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
-OPENAI_API_KEY="sk-" # add your openai key here
-GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
-
-# Github
-GITHUB_CLIENT_SECRET="" # For fetching release notes
+OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
+OPENAI_API_KEY="sk-" # deprecated
+GPT_ENGINE="gpt-3.5-turbo" # deprecated
 
 # Settings related to Docker
-DOCKERIZED=1
+DOCKERIZED=1 # deprecated
 
 # set to 1 If using the pre-configured minio setup
 USE_MINIO=1
 
 # Nginx Configuration
 NGINX_PORT=80
-
-# Default Creds
-DEFAULT_EMAIL="captain@plane.so"
-DEFAULT_PASSWORD="password123"
-
-# SignUps
-ENABLE_SIGNUP="1"
+# Auto generated and Required that will be generated from setup.sh
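
A quick aside on the `${VAR}` references kept above: they only become concrete values once something interpolates them (the shell, or Docker Compose when it loads the file). Expanding them by hand in plain shell, purely for illustration:

```bash
# Illustrative only: expand the interpolated connection strings by hand.
PGUSER="plane"; PGPASSWORD="plane"; PGHOST="plane-db"; PGDATABASE="plane"
REDIS_HOST="plane-redis"

DATABASE_URL="postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}"
REDIS_URL="redis://${REDIS_HOST}:6379/"

echo "$DATABASE_URL"  # postgresql://plane:plane@plane-db/plane
echo "$REDIS_URL"     # redis://plane-redis:6379/
```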
.eslintrc.js
@@ -4,7 +4,7 @@ module.exports = {
   extends: ["custom"],
   settings: {
     next: {
-      rootDir: ["apps/*"],
+      rootDir: ["web/", "space/"],
     },
   },
 };
.github/workflows/build-branch.yml (vendored, new file, 213 lines)
@@ -0,0 +1,213 @@
+name: Branch Build
+
+on:
+  pull_request:
+    types:
+      - closed
+    branches:
+      - master
+      - release
+      - qa
+      - develop
+
+env:
+  TARGET_BRANCH: ${{ github.event.pull_request.base.ref }}
+
+jobs:
+  branch_build_and_push:
+    if: ${{ (github.event_name == 'pull_request' && github.event.action =='closed' && github.event.pull_request.merged == true) }}
+    name: Build-Push Web/Space/API/Proxy Docker Image
+    runs-on: ubuntu-20.04
+
+    steps:
+      - name: Check out the repo
+        uses: actions/checkout@v3.3.0
+
+      # - name: Set Target Branch Name on PR close
+      #   if: ${{ github.event_name == 'pull_request' && github.event.action =='closed' }}
+      #   run: echo "TARGET_BRANCH=${{ github.event.pull_request.base.ref }}" >> $GITHUB_ENV
+
+      # - name: Set Target Branch Name on other than PR close
+      #   if: ${{ github.event_name == 'push' }}
+      #   run: echo "TARGET_BRANCH=${{ github.ref_name }}" >> $GITHUB_ENV
+
+      - uses: ASzc/change-string-case-action@v2
+        id: gh_branch_upper_lower
+        with:
+          string: ${{env.TARGET_BRANCH}}
+
+      - uses: mad9000/actions-find-and-replace-string@2
+        id: gh_branch_replace_slash
+        with:
+          source: ${{ steps.gh_branch_upper_lower.outputs.lowercase }}
+          find: '/'
+          replace: '-'
+
+      - uses: mad9000/actions-find-and-replace-string@2
+        id: gh_branch_replace_dot
+        with:
+          source: ${{ steps.gh_branch_replace_slash.outputs.value }}
+          find: '.'
+          replace: ''
+
+      - uses: mad9000/actions-find-and-replace-string@2
+        id: gh_branch_clean
+        with:
+          source: ${{ steps.gh_branch_replace_dot.outputs.value }}
+          find: '_'
+          replace: ''
+
+      - name: Uploading Proxy Source
+        uses: actions/upload-artifact@v3
+        with:
+          name: proxy-src-code
+          path: ./nginx
+
+      - name: Uploading Backend Source
+        uses: actions/upload-artifact@v3
+        with:
+          name: backend-src-code
+          path: ./apiserver
+
+      - name: Uploading Web Source
+        uses: actions/upload-artifact@v3
+        with:
+          name: web-src-code
+          path: |
+            ./
+            !./apiserver
+            !./nginx
+            !./deploy
+            !./space
+
+      - name: Uploading Space Source
+        uses: actions/upload-artifact@v3
+        with:
+          name: space-src-code
+          path: |
+            ./
+            !./apiserver
+            !./nginx
+            !./deploy
+            !./web
+
+    outputs:
+      gh_branch_name: ${{ steps.gh_branch_clean.outputs.value }}
+
+  branch_build_push_frontend:
+    runs-on: ubuntu-20.04
+    needs: [ branch_build_and_push ]
+    steps:
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2.5.0
+
+      - name: Login to Docker Hub
+        uses: docker/login-action@v2.1.0
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Downloading Web Source Code
+        uses: actions/download-artifact@v3
+        with:
+          name: web-src-code
+
+      - name: Build and Push Frontend to Docker Container Registry
+        uses: docker/build-push-action@v4.0.0
+        with:
+          context: .
+          file: ./web/Dockerfile.web
+          platforms: linux/amd64
+          tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
+          push: true
+        env:
+          DOCKER_BUILDKIT: 1
+          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
+  branch_build_push_space:
+    runs-on: ubuntu-20.04
+    needs: [ branch_build_and_push ]
+    steps:
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2.5.0
+
+      - name: Login to Docker Hub
+        uses: docker/login-action@v2.1.0
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Downloading Space Source Code
+        uses: actions/download-artifact@v3
+        with:
+          name: space-src-code
+
+      - name: Build and Push Space to Docker Hub
+        uses: docker/build-push-action@v4.0.0
+        with:
+          context: .
+          file: ./space/Dockerfile.space
+          platforms: linux/amd64
+          tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
+          push: true
+        env:
+          DOCKER_BUILDKIT: 1
+          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
+  branch_build_push_backend:
+    runs-on: ubuntu-20.04
+    needs: [ branch_build_and_push ]
+    steps:
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2.5.0
+
+      - name: Login to Docker Hub
+        uses: docker/login-action@v2.1.0
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Downloading Backend Source Code
+        uses: actions/download-artifact@v3
+        with:
+          name: backend-src-code
+
+      - name: Build and Push Backend to Docker Hub
+        uses: docker/build-push-action@v4.0.0
+        with:
+          context: .
+          file: ./Dockerfile.api
+          platforms: linux/amd64
+          push: true
+          tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
+        env:
+          DOCKER_BUILDKIT: 1
+          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
+  branch_build_push_proxy:
+    runs-on: ubuntu-20.04
+    needs: [ branch_build_and_push ]
+    steps:
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2.5.0
+
+      - name: Login to Docker Hub
+        uses: docker/login-action@v2.1.0
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Downloading Proxy Source Code
+        uses: actions/download-artifact@v3
+        with:
+          name: proxy-src-code
+
+      - name: Build and Push Plane-Proxy to Docker Hub
+        uses: docker/build-push-action@v4.0.0
+        with:
+          context: .
+          file: ./Dockerfile
+          platforms: linux/amd64
+          tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
+          push: true
+        env:
+          DOCKER_BUILDKIT: 1
+          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
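
The `change-string-case` and `find-and-replace` steps above exist only to turn the merged PR's target branch into a valid Docker tag: lowercase it, map `/` to `-`, and strip `.` and `_`. The equivalent transformation in plain shell, for illustration (the branch name is hypothetical):

```bash
# Sketch of the tag sanitization done by the three find-and-replace steps
# (lowercase; '/' -> '-'; drop '.' and '_').
branch="release/v0.13_RC.1"
tag="$(printf '%s' "$branch" | tr '[:upper:]' '[:lower:]' | tr '/' '-' | tr -d '._')"
echo "$tag"   # release-v013rc1
```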
.github/workflows/build-test-pull-request.yml (vendored, new file, 48 lines)
@@ -0,0 +1,48 @@
+name: Build Pull Request Contents
+
+on:
+  pull_request:
+    types: ["opened", "synchronize"]
+
+jobs:
+  build-pull-request-contents:
+    name: Build Pull Request Contents
+    runs-on: ubuntu-20.04
+    permissions:
+      pull-requests: read
+
+    steps:
+      - name: Checkout Repository to Actions
+        uses: actions/checkout@v3.3.0
+
+      - name: Setup Node.js 18.x
+        uses: actions/setup-node@v2
+        with:
+          node-version: 18.x
+          cache: 'yarn'
+
+      - name: Get changed files
+        id: changed-files
+        uses: tj-actions/changed-files@v38
+        with:
+          files_yaml: |
+            apiserver:
+              - apiserver/**
+            web:
+              - web/**
+            deploy:
+              - space/**
+
+      - name: Build Plane's Main App
+        if: steps.changed-files.outputs.web_any_changed == 'true'
+        run: |
+          yarn
+          yarn build --filter=web
+
+      - name: Build Plane's Deploy App
+        if: steps.changed-files.outputs.deploy_any_changed == 'true'
+        run: |
+          yarn
+          yarn build --filter=space
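
For context, the `files_yaml` input makes `tj-actions/changed-files` expose one output per group, e.g. `web_any_changed`, which the later steps gate on. Outside of CI, a rough local approximation of that check might be (illustrative, with `master` as an assumed base branch):

```bash
# Sketch: approximate the web_any_changed output locally.
if git diff --name-only origin/master...HEAD | grep -q '^web/'; then
  echo "web_any_changed=true"
fi
```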
.github/workflows/create-sync-pr.yml (vendored, new file, 79 lines)
@@ -0,0 +1,79 @@
+name: Create PR in Plane EE Repository to sync the changes
+
+on:
+  pull_request:
+    branches:
+      - master
+    types:
+      - closed
+
+jobs:
+  create_pr:
+    # Only run the job when a PR is merged
+    if: github.event.pull_request.merged == true
+    runs-on: ubuntu-latest
+    permissions:
+      pull-requests: write
+      contents: read
+    steps:
+      - name: Check SOURCE_REPO
+        id: check_repo
+        env:
+          SOURCE_REPO: ${{ secrets.SOURCE_REPO_NAME }}
+        run: |
+          echo "::set-output name=is_correct_repo::$(if [[ "$SOURCE_REPO" == "makeplane/plane" ]]; then echo 'true'; else echo 'false'; fi)"
+
+      - name: Checkout Code
+        if: steps.check_repo.outputs.is_correct_repo == 'true'
+        uses: actions/checkout@v2
+        with:
+          persist-credentials: false
+          fetch-depth: 0
+
+      - name: Set up Branch Name
+        if: steps.check_repo.outputs.is_correct_repo == 'true'
+        run: |
+          echo "SOURCE_BRANCH_NAME=${{ github.head_ref }}" >> $GITHUB_ENV
+
+      - name: Setup GH CLI
+        if: steps.check_repo.outputs.is_correct_repo == 'true'
+        run: |
+          type -p curl >/dev/null || (sudo apt update && sudo apt install curl -y)
+          curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
+          sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg
+          echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null
+          sudo apt update
+          sudo apt install gh -y
+
+      - name: Create Pull Request
+        if: steps.check_repo.outputs.is_correct_repo == 'true'
+        env:
+          GH_TOKEN: ${{ secrets.ACCESS_TOKEN }}
+        run: |
+          TARGET_REPO="${{ secrets.TARGET_REPO_NAME }}"
+          TARGET_BRANCH="${{ secrets.TARGET_REPO_BRANCH }}"
+          SOURCE_BRANCH="${{ env.SOURCE_BRANCH_NAME }}"
+
+          git checkout $SOURCE_BRANCH
+          git remote add target "https://$GH_TOKEN@github.com/$TARGET_REPO.git"
+          git push target $SOURCE_BRANCH:$SOURCE_BRANCH
+
+          PR_TITLE="${{ github.event.pull_request.title }}"
+          PR_BODY="${{ github.event.pull_request.body }}"
+
+          # Remove double quotes
+          PR_TITLE_CLEANED="${PR_TITLE//\"/}"
+          PR_BODY_CLEANED="${PR_BODY//\"/}"
+
+          # Construct PR_BODY_CONTENT using a here-document
+          PR_BODY_CONTENT=$(cat <<EOF
+          $PR_BODY_CLEANED
+          EOF
+          )
+
+          gh pr create \
+            --base $TARGET_BRANCH \
+            --head $SOURCE_BRANCH \
+            --title "[SYNC] $PR_TITLE_CLEANED" \
+            --body "$PR_BODY_CONTENT" \
+            --repo $TARGET_REPO
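
One caveat worth noting about the `Check SOURCE_REPO` step: the `::set-output` workflow command has since been deprecated by GitHub Actions in favor of writing to the `$GITHUB_OUTPUT` file. An equivalent of that line in the newer form would look roughly like this (a sketch, not part of the diff):

```bash
# Same repo check, emitted via the $GITHUB_OUTPUT file instead of ::set-output.
if [[ "$SOURCE_REPO" == "makeplane/plane" ]]; then
  echo "is_correct_repo=true" >> "$GITHUB_OUTPUT"
else
  echo "is_correct_repo=false" >> "$GITHUB_OUTPUT"
fi
```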
.github/workflows/push-image-backend.yml (vendored, deleted, 77 lines)
@@ -1,77 +0,0 @@
-name: Build and Push Backend Docker Image
-
-on:
-  push:
-    branches:
-      - 'develop'
-      - 'master'
-    tags:
-      - '*'
-
-jobs:
-  build_push_backend:
-    name: Build and Push Api Server Docker Image
-    runs-on: ubuntu-20.04
-
-    steps:
-      - name: Check out the repo
-        uses: actions/checkout@v3.3.0
-
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2.1.0
-        with:
-          platforms: linux/arm64,linux/amd64
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2.5.0
-
-      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v2.1.0
-        with:
-          registry: "ghcr.io"
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Login to Docker Hub
-        uses: docker/login-action@v2.1.0
-        with:
-          registry: "registry.hub.docker.com"
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_PASSWORD }}
-
-      - name: Extract metadata (tags, labels) for Docker (Docker Hub)
-        id: ghmeta
-        uses: docker/metadata-action@v4.3.0
-        with:
-          images: makeplane/plane-backend
-
-      - name: Extract metadata (tags, labels) for Docker (Github)
-        id: dkrmeta
-        uses: docker/metadata-action@v4.3.0
-        with:
-          images: ghcr.io/${{ github.repository }}-backend
-
-      - name: Build and Push to GitHub Container Registry
-        uses: docker/build-push-action@v4.0.0
-        with:
-          context: ./apiserver
-          file: ./apiserver/Dockerfile.api
-          platforms: linux/arm64,linux/amd64
-          push: true
-          cache-from: type=gha
-          cache-to: type=gha
-          tags: ${{ steps.ghmeta.outputs.tags }}
-          labels: ${{ steps.ghmeta.outputs.labels }}
-
-      - name: Build and Push to Docker Hub
-        uses: docker/build-push-action@v4.0.0
-        with:
-          context: ./apiserver
-          file: ./apiserver/Dockerfile.api
-          platforms: linux/arm64,linux/amd64
-          push: true
-          cache-from: type=gha
-          cache-to: type=gha
-          tags: ${{ steps.dkrmeta.outputs.tags }}
-          labels: ${{ steps.dkrmeta.outputs.labels }}
.github/workflows/push-image-frontend.yml (vendored, deleted, 77 lines)
@@ -1,77 +0,0 @@
-name: Build and Push Frontend Docker Image
-
-on:
-  push:
-    branches:
-      - 'develop'
-      - 'master'
-    tags:
-      - '*'
-
-jobs:
-  build_push_frontend:
-    name: Build Frontend Docker Image
-    runs-on: ubuntu-20.04
-
-    steps:
-      - name: Check out the repo
-        uses: actions/checkout@v3.3.0
-
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2.1.0
-        with:
-          platforms: linux/arm64,linux/amd64
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2.5.0
-
-      - name: Login to Github Container Registry
-        uses: docker/login-action@v2.1.0
-        with:
-          registry: "ghcr.io"
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Login to Docker Hub
-        uses: docker/login-action@v2.1.0
-        with:
-          registry: "registry.hub.docker.com"
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_PASSWORD }}
-
-      - name: Extract metadata (tags, labels) for Docker (Docker Hub)
-        id: ghmeta
-        uses: docker/metadata-action@v4.3.0
-        with:
-          images: makeplane/plane-frontend
-
-      - name: Extract metadata (tags, labels) for Docker (Github)
-        id: meta
-        uses: docker/metadata-action@v4.3.0
-        with:
-          images: ghcr.io/${{ github.repository }}-frontend
-
-      - name: Build and Push to GitHub Container Registry
-        uses: docker/build-push-action@v4.0.0
-        with:
-          context: .
-          file: ./apps/app/Dockerfile.web
-          platforms: linux/arm64,linux/amd64
-          push: true
-          cache-from: type=gha
-          cache-to: type=gha
-          tags: ${{ steps.ghmeta.outputs.tags }}
-          labels: ${{ steps.ghmeta.outputs.labels }}
-
-      - name: Build and Push to Docker Container Registry
-        uses: docker/build-push-action@v4.0.0
-        with:
-          context: .
-          file: ./apps/app/Dockerfile.web
-          platforms: linux/arm64,linux/amd64
-          push: true
-          cache-from: type=gha
-          cache-to: type=gha
-          tags: ${{ steps.dkrmeta.outputs.tags }}
-          labels: ${{ steps.dkrmeta.outputs.labels }}
.github/workflows/update-docker-images.yml (vendored, new file, 107 lines)
@@ -0,0 +1,107 @@
+name: Update Docker Images for Plane on Release
+
+on:
+  release:
+    types: [released, prereleased]
+
+jobs:
+  build_push_backend:
+    name: Build and Push Api Server Docker Image
+    runs-on: ubuntu-20.04
+
+    steps:
+      - name: Check out the repo
+        uses: actions/checkout@v3.3.0
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2.5.0
+
+      - name: Login to Docker Hub
+        uses: docker/login-action@v2.1.0
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
+        id: metaFrontend
+        uses: docker/metadata-action@v4.3.0
+        with:
+          images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend
+          tags: |
+            type=ref,event=tag
+
+      - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
+        id: metaBackend
+        uses: docker/metadata-action@v4.3.0
+        with:
+          images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend
+          tags: |
+            type=ref,event=tag
+
+      - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
+        id: metaSpace
+        uses: docker/metadata-action@v4.3.0
+        with:
+          images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space
+          tags: |
+            type=ref,event=tag
+
+      - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
+        id: metaProxy
+        uses: docker/metadata-action@v4.3.0
+        with:
+          images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy
+          tags: |
+            type=ref,event=tag
+
+      - name: Build and Push Frontend to Docker Container Registry
+        uses: docker/build-push-action@v4.0.0
+        with:
+          context: .
+          file: ./web/Dockerfile.web
+          platforms: linux/amd64
+          tags: ${{ steps.metaFrontend.outputs.tags }}
+          push: true
+        env:
+          DOCKER_BUILDKIT: 1
+          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+          DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Build and Push Backend to Docker Hub
+        uses: docker/build-push-action@v4.0.0
+        with:
+          context: ./apiserver
+          file: ./apiserver/Dockerfile.api
+          platforms: linux/amd64
+          push: true
+          tags: ${{ steps.metaBackend.outputs.tags }}
+        env:
+          DOCKER_BUILDKIT: 1
+          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+          DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Build and Push Plane-Deploy to Docker Hub
+        uses: docker/build-push-action@v4.0.0
+        with:
+          context: .
+          file: ./space/Dockerfile.space
+          platforms: linux/amd64
+          push: true
+          tags: ${{ steps.metaSpace.outputs.tags }}
+        env:
+          DOCKER_BUILDKIT: 1
+          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+          DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Build and Push Plane-Proxy to Docker Hub
+        uses: docker/build-push-action@v4.0.0
+        with:
+          context: ./nginx
+          file: ./nginx/Dockerfile
+          platforms: linux/amd64
+          push: true
+          tags: ${{ steps.metaProxy.outputs.tags }}
+        env:
+          DOCKER_BUILDKIT: 1
+          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+          DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
.gitignore (vendored, 10 lines changed)
@@ -16,6 +16,8 @@ node_modules
 
 # Production
 /build
+dist/
+out/
 
 # Misc
 .DS_Store
@@ -70,4 +72,10 @@ package-lock.json
 # lock files
 package-lock.json
 pnpm-lock.yaml
-pnpm-workspace.yaml
+pnpm-workspace.yaml
+
+.npmrc
+.secrets
+tmp/
+## packages
+dist
CODE_OF_CONDUCT.md
@@ -17,23 +17,23 @@ diverse, inclusive, and healthy community.
 Examples of behavior that contributes to a positive environment for our
 community include:
 
-* Demonstrating empathy and kindness toward other people
-* Being respectful of differing opinions, viewpoints, and experiences
-* Giving and gracefully accepting constructive feedback
-* Accepting responsibility and apologizing to those affected by our mistakes,
+- Demonstrating empathy and kindness toward other people
+- Being respectful of differing opinions, viewpoints, and experiences
+- Giving and gracefully accepting constructive feedback
+- Accepting responsibility and apologizing to those affected by our mistakes,
   and learning from the experience
-* Focusing on what is best not just for us as individuals, but for the
+- Focusing on what is best not just for us as individuals, but for the
   overall community
 
 Examples of unacceptable behavior include:
 
-* The use of sexualized language or imagery, and sexual attention or
+- The use of sexualized language or imagery, and sexual attention or
   advances of any kind
-* Trolling, insulting or derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or email
+- Trolling, insulting or derogatory comments, and personal or political attacks
+- Public or private harassment
+- Publishing others' private information, such as a physical or email
   address, without their explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
+- Other conduct which could reasonably be considered inappropriate in a
   professional setting
@@ -60,7 +60,7 @@ representative at an online or offline event.
 
 Instances of abusive, harassing, or otherwise unacceptable behavior may be
 reported to the community leaders responsible for enforcement at
-hello@plane.so.
+squawk@plane.so.
 All complaints will be reviewed and investigated promptly and fairly.
 
 All community leaders are obligated to respect the privacy and security of the
@@ -106,7 +106,7 @@ Violating these terms may lead to a permanent ban.
 ### 4. Permanent Ban
 
 **Community Impact**: Demonstrating a pattern of violation of community
-standards, including sustained inappropriate behavior, harassment of an
+standards, including sustained inappropriate behavior, harassment of an
 individual, or aggression toward or disparagement of classes of individuals.
 
 **Consequence**: A permanent ban from any sort of public interaction within
@@ -125,4 +125,4 @@ enforcement ladder](https://github.com/mozilla/diversity).
 
 For answers to common questions about this code of conduct, see the FAQ at
 https://www.contributor-covenant.org/faq. Translations are available at
-https://www.contributor-covenant.org/translations.
+https://www.contributor-covenant.org/translations.
CONTRIBUTING.md
@@ -30,6 +30,48 @@ The project is a monorepo, with backend api and frontend in a single repo.
 
 The backend is a django project which is kept inside apiserver
 
+1. Clone the repo
+
+```bash
+git clone https://github.com/makeplane/plane
+cd plane
+chmod +x setup.sh
+```
+
+2. Run setup.sh
+
+```bash
+./setup.sh
+```
+
+3. Define `NEXT_PUBLIC_API_BASE_URL=http://localhost` in **web/.env** and **space/.env** file
+
+```bash
+echo "\nNEXT_PUBLIC_API_BASE_URL=http://localhost\n" >> ./web/.env
+```
+
+```bash
+echo "\nNEXT_PUBLIC_API_BASE_URL=http://localhost\n" >> ./space/.env
+```
+
+4. Run Docker compose up
+
+```bash
+docker compose up -d
+```
+
+5. Install dependencies
+
+```bash
+yarn install
+```
+
+6. Run the web app in development mode
+
+```bash
+yarn dev
+```
+
 ## Missing a Feature?
 
 If a feature is missing, you can directly _request_ a new one [here](https://github.com/makeplane/plane/issues/new?assignees=&labels=feature&template=feature_request.yml&title=%F0%9F%9A%80+Feature%3A+). You also can do the same by choosing "🚀 Feature" when raising a [New Issue](https://github.com/makeplane/plane/issues/new/choose) on our GitHub Repository.
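
One portability note on step 3 of the added instructions: a bare `echo` does not expand `\n` escapes in every shell, so the appended line can end up containing a literal `\n`. A `printf` equivalent avoids that:

```bash
# Portable alternative to the echo commands in step 3:
# printf always interprets \n escape sequences.
printf '\nNEXT_PUBLIC_API_BASE_URL=http://localhost\n' >> ./web/.env
printf '\nNEXT_PUBLIC_API_BASE_URL=http://localhost\n' >> ./space/.env
```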
Dockerfile (14 lines changed)
@@ -5,9 +5,11 @@ WORKDIR /app
 ENV NEXT_PUBLIC_API_BASE_URL=http://NEXT_PUBLIC_API_BASE_URL_PLACEHOLDER
 
 RUN yarn global add turbo
+RUN apk add tree
 COPY . .
 
-RUN turbo prune --scope=app --docker
+RUN turbo prune --scope=app --scope=plane-deploy --docker
+CMD tree -I node_modules/
 
 # Add lockfile and package.json's of isolated subworkspace
 FROM node:18-alpine AS installer
@@ -21,14 +23,14 @@
 COPY --from=builder /app/out/json/ .
 COPY --from=builder /app/out/yarn.lock ./yarn.lock
 RUN yarn install
 
-# Build the project
+# # Build the project
 COPY --from=builder /app/out/full/ .
 COPY turbo.json turbo.json
 COPY replace-env-vars.sh /usr/local/bin/
 USER root
 RUN chmod +x /usr/local/bin/replace-env-vars.sh
 
-RUN yarn turbo run build --filter=app
+RUN yarn turbo run build
 
 ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
     BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
@@ -42,7 +44,6 @@
 ENV PYTHONDONTWRITEBYTECODE 1
 ENV PYTHONUNBUFFERED 1
 ENV PIP_DISABLE_PIP_VERSION_CHECK=1
 ENV DJANGO_SETTINGS_MODULE plane.settings.production
-ENV DOCKERIZED 1
 
 WORKDIR /code
 
@@ -96,11 +97,16 @@ RUN adduser --system --uid 1001 captain
 COPY --from=installer /app/apps/app/next.config.js .
 COPY --from=installer /app/apps/app/package.json .
+COPY --from=installer /app/apps/space/next.config.js .
+COPY --from=installer /app/apps/space/package.json .
 
 COPY --from=installer --chown=captain:plane /app/apps/app/.next/standalone ./
 
 COPY --from=installer --chown=captain:plane /app/apps/app/.next/static ./apps/app/.next/static
 
+COPY --from=installer --chown=captain:plane /app/apps/space/.next/standalone ./
+COPY --from=installer --chown=captain:plane /app/apps/space/.next ./apps/space/.next
+
 ENV NEXT_TELEMETRY_DISABLED 1
 
 # RUN rm /etc/nginx/conf.d/default.conf
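
For context on the `turbo prune` line above: with `--docker`, Turborepo writes a pruned copy of the monorepo into `out/`, split into `json/` (package manifests only, for a cacheable install layer) and `full/` (the sources), plus a pruned lockfile — which is what the later `COPY --from=builder /app/out/...` instructions consume. Roughly:

```bash
# Sketch: the layout `turbo prune --docker` produces (per Turborepo docs).
turbo prune --scope=app --scope=plane-deploy --docker
ls out/
# full/  json/  yarn.lock
```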
ENV_SETUP.md (new file, 149 lines)
@@ -0,0 +1,149 @@
+# Environment Variables
+
+Environment variables are distributed in various files. Please refer them carefully.
+
+## {PROJECT_FOLDER}/.env
+
+File is available in the project root folder
+
+```
+# Database Settings
+PGUSER="plane"
+PGPASSWORD="plane"
+PGHOST="plane-db"
+PGDATABASE="plane"
+DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
+
+# Redis Settings
+REDIS_HOST="plane-redis"
+REDIS_PORT="6379"
+REDIS_URL="redis://${REDIS_HOST}:6379/"
+
+# AWS Settings
+AWS_REGION=""
+AWS_ACCESS_KEY_ID="access-key"
+AWS_SECRET_ACCESS_KEY="secret-key"
+AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
+# Changing this requires change in the nginx.conf for uploads if using minio setup
+AWS_S3_BUCKET_NAME="uploads"
+# Maximum file upload limit
+FILE_SIZE_LIMIT=5242880
+
+# GPT settings
+OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
+OPENAI_API_KEY="sk-" # deprecated
+GPT_ENGINE="gpt-3.5-turbo" # deprecated
+
+# set to 1 If using the pre-configured minio setup
+USE_MINIO=1
+
+# Nginx Configuration
+NGINX_PORT=80
+```
+
+## {PROJECT_FOLDER}/web/.env.example
+
+```
+# Enable/Disable OAUTH - default 0 for selfhosted instance
+NEXT_PUBLIC_ENABLE_OAUTH=0
+# Public boards deploy URL
+NEXT_PUBLIC_DEPLOY_URL="http://localhost/spaces"
+```
+
+## {PROJECT_FOLDER}/spaces/.env.example
+
+```
+# Flag to toggle OAuth
+NEXT_PUBLIC_ENABLE_OAUTH=0
+```
+
+## {PROJECT_FOLDER}/apiserver/.env
+
+```
+# Backend
+# Debug value for api server use it as 0 for production use
+DEBUG=0
+DJANGO_SETTINGS_MODULE="plane.settings.selfhosted"
+
+# Error logs
+SENTRY_DSN=""
+
+# Database Settings
+PGUSER="plane"
+PGPASSWORD="plane"
+PGHOST="plane-db"
+PGDATABASE="plane"
+DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
+
+# Redis Settings
+REDIS_HOST="plane-redis"
+REDIS_PORT="6379"
+REDIS_URL="redis://${REDIS_HOST}:6379/"
+
+# Email Settings
+EMAIL_HOST=""
+EMAIL_HOST_USER=""
+EMAIL_HOST_PASSWORD=""
+EMAIL_PORT=587
+EMAIL_FROM="Team Plane <team@mailer.plane.so>"
+EMAIL_USE_TLS="1"
+EMAIL_USE_SSL="0"
+
+# AWS Settings
+AWS_REGION=""
+AWS_ACCESS_KEY_ID="access-key"
+AWS_SECRET_ACCESS_KEY="secret-key"
+AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
+# Changing this requires change in the nginx.conf for uploads if using minio setup
+AWS_S3_BUCKET_NAME="uploads"
+# Maximum file upload limit
+FILE_SIZE_LIMIT=5242880
+
+# GPT settings
+OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
+OPENAI_API_KEY="sk-" # deprecated
+GPT_ENGINE="gpt-3.5-turbo" # deprecated
+
+# Settings related to Docker
+DOCKERIZED=1 # Deprecated
+
+# Github
+GITHUB_CLIENT_SECRET="" # For fetching release notes
+
+# set to 1 If using the pre-configured minio setup
+USE_MINIO=1
+
+# Nginx Configuration
+NGINX_PORT=80
+
+# Default Creds
+DEFAULT_EMAIL="captain@plane.so"
+DEFAULT_PASSWORD="password123"
+
+# SignUps
+ENABLE_SIGNUP="1"
+
+# Email Redirection URL
+WEB_URL="http://localhost"
+```
+
+## Updates
+
+- The environment variable NEXT_PUBLIC_API_BASE_URL has been removed from both the web and space projects.
+- The naming convention for containers and images has been updated.
+- The plane-worker image will no longer be maintained, as it has been merged with plane-backend.
+- The Tiptap pro-extension dependency has been removed, eliminating the need for Tiptap API keys.
+- The image name for Plane deployment has been changed to plane-space.
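
To verify which of these values a running container actually picked up, the environment can be inspected in place; the service name `api` below is an assumption and should be matched to the compose file:

```bash
# Sketch: inspect effective settings inside a running service
# ('api' is a placeholder service name; check docker-compose.yml).
docker compose exec api env | grep -E '^(PG|REDIS|AWS|NGINX|DEBUG)'
```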
81
README.md
81
README.md
@@ -2,31 +2,31 @@
|
||||
|
||||
<p align="center">
|
||||
<a href="https://plane.so">
|
||||
<img src="https://res.cloudinary.com/toolspacedev/image/upload/v1680596414/Plane/Plane_Icon_Blue_on_White_150x150_muysa3.jpg" alt="Plane Logo" width="70">
|
||||
<img src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_logo_.webp" alt="Plane Logo" width="70">
|
||||
</a>
|
||||
</p>
|
||||
|
||||
<h3 align="center"><b>Plane</b></h3>
|
||||
<p align="center"><b>Open-source, self-hosted project planning tool</b></p>
|
||||
<p align="center"><b>Flexible, extensible open-source project management</b></p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://discord.com/invite/A92xrEGCge">
|
||||
<img alt="Discord" src="https://img.shields.io/discord/1031547764020084846?color=5865F2&label=Discord&style=for-the-badge" />
|
||||
<img alt="Discord online members" src="https://img.shields.io/discord/1031547764020084846?color=5865F2&label=Discord&style=for-the-badge" />
|
||||
</a>
|
||||
<img alt="Discord" src="https://img.shields.io/github/commit-activity/m/makeplane/plane?style=for-the-badge" />
|
||||
<img alt="Commit activity per month" src="https://img.shields.io/github/commit-activity/m/makeplane/plane?style=for-the-badge" />
|
||||
</p>
|
||||
|
||||
<p>
|
||||
<a href="https://app.plane.so/#gh-light-mode-only" target="_blank">
|
||||
<img
|
||||
src="https://ik.imagekit.io/killbluedog/Plane_Screen.png?updatedAt=1684942001069"
|
||||
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_screen.webp"
|
||||
alt="Plane Screens"
|
||||
width="100%"
|
||||
/>
|
||||
</a>
|
||||
<a href="https://app.plane.so/#gh-dark-mode-only" target="_blank">
|
||||
<img
|
||||
src="https://ik.imagekit.io/killbluedog/Plane_Screens_Dark_Mode.png?updatedAt=1684942388044"
|
||||
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_screens_dark_mode.webp"
|
||||
alt="Plane Screens"
|
||||
width="100%"
|
||||
/>
|
||||
@@ -35,58 +35,58 @@

Meet [Plane](https://plane.so). An open-source software development tool to manage issues, sprints, and product roadmaps with peace of mind 🧘♀️.

> Plane is still in its early days, not everything will be perfect yet, and hiccups may happen. Please let us know of any suggestions, ideas, or bugs that you encounter on our [Discord](https://discord.com/invite/A92xrEGCge) or GitHub issues, and we will use your feedback to improve on our upcoming releases.

The easiest way to get started with Plane is by creating a [Plane Cloud](https://app.plane.so) account. Plane Cloud offers a hosted solution for Plane. If you prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/self-hosting).

## ⚡️ Contributors Quick Start

## ⚡️ Quick start with Docker Compose

### Prerequisite

### Docker Compose Setup

Development system must have docker engine installed and running.

- Clone the repository

### Steps

Setting up the local environment is extremely easy and straightforward. Follow the steps below and you will be ready to contribute:

1. Clone the code locally using `git clone https://github.com/makeplane/plane.git`
1. Switch to the code folder `cd plane`
1. Create your feature or fix branch you plan to work on using `git checkout -b <feature-branch-name>`
1. Open terminal and run `./setup.sh`
1. Open the code on VSCode or a similar equivalent IDE
1. Review the `.env` files available in various folders. Visit [Environment Setup](./ENV_SETUP.md) to learn about the environment variables used in the system
1. Run the docker command to initiate various services `docker compose -f docker-compose-local.yml up -d`

```bash
git clone https://github.com/makeplane/plane
cd plane
chmod +x setup.sh
./setup.sh
```

- Run setup.sh

You are ready to make changes to the code. Do not forget to refresh the browser (in case it does not auto-reload).

```bash
./setup.sh http://localhost
```

That's it!

> If running in a cloud environment, replace localhost with the public-facing IP address of the VM.

## 🍙 Self Hosting

- Run Docker compose up

```bash
docker compose up -d
```

<strong>You can use the default email and password for your first login: `captain@plane.so` and `password123`.</strong>

For self-hosting environment setup, visit the [Self Hosting](https://docs.plane.so/self-hosting) documentation page.

## 🚀 Features

* **Issue Planning and Tracking**: Quickly create issues and add details using a powerful rich text editor that supports file uploads. Add sub-properties and references to issues for better organization and tracking.
* **Issue Attachments**: Collaborate effectively by attaching files to issues, making it easy for your team to find and share important project-related documents.
* **Layouts**: Customize your project view with your preferred layout - choose from List, Kanban, or Calendar to visualize your project in a way that makes sense to you.
* **Cycles**: Plan sprints with Cycles to keep your team on track and productive. Gain insights into your project's progress with burn-down charts and other useful features.
* **Modules**: Break down your large projects into smaller, more manageable modules. Assign modules between teams to easily track and plan your project's progress.
* **Views**: Create custom filters to display only the issues that matter to you. Save and share your filters in just a few clicks.
* **Pages**: Plane pages function as an AI-powered notepad, allowing you to easily document issues, cycle plans, and module details, and then synchronize them with your issues.
* **Command K**: Enjoy a better user experience with the new Command + K menu. Easily manage and navigate through your projects from one convenient location.
* **GitHub Sync**: Streamline your planning process by syncing your GitHub issues with Plane. Keep all your issues in one place for better tracking and collaboration.
- **Issue Planning and Tracking**: Quickly create issues and add details using a powerful rich text editor that supports file uploads. Add sub-properties and references to issues for better organization and tracking.
- **Issue Attachments**: Collaborate effectively by attaching files to issues, making it easy for your team to find and share important project-related documents.
- **Layouts**: Customize your project view with your preferred layout - choose from List, Kanban, or Calendar to visualize your project in a way that makes sense to you.
- **Cycles**: Plan sprints with Cycles to keep your team on track and productive. Gain insights into your project's progress with burn-down charts and other useful features.
- **Modules**: Break down your large projects into smaller, more manageable modules. Assign modules between teams to easily track and plan your project's progress.
- **Views**: Create custom filters to display only the issues that matter to you. Save and share your filters in just a few clicks.
- **Pages**: Plane pages function as an AI-powered notepad, allowing you to easily document issues, cycle plans, and module details, and then synchronize them with your issues.
- **Command K**: Enjoy a better user experience with the new Command + K menu. Easily manage and navigate through your projects from one convenient location.
- **GitHub Sync**: Streamline your planning process by syncing your GitHub issues with Plane. Keep all your issues in one place for better tracking and collaboration.
## 📸 Screenshots

<p>
<a href="https://plane.so" target="_blank">
<img
src="https://ik.imagekit.io/killbluedog/Plane_Views_Dark_Mode.png?updatedAt=1684943050275"
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_views_dark_mode.webp"
alt="Plane Views"
width="100%"
/>
@@ -95,7 +95,7 @@ docker compose up -d
<p>
<a href="https://plane.so" target="_blank">
<img
src="https://ik.imagekit.io/killbluedog/Plane_Issue_Detail_Dark_Mode.png?updatedAt=1684943050202"
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_issue_detail_dark_mode.webp"
alt="Plane Issue Details"
width="100%"
/>
@@ -104,7 +104,7 @@ docker compose up -d
<p>
<a href="https://plane.so" target="_blank">
<img
src="https://ik.imagekit.io/killbluedog/Plane_Cycles___Modules_Dark_Mode.png?updatedAt=1684943050281"
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_cycles_modules_dark_mode.webp"
alt="Plane Cycles and Modules"
width="100%"
/>
@@ -113,7 +113,7 @@ docker compose up -d
<p>
<a href="https://plane.so" target="_blank">
<img
src="https://ik.imagekit.io/killbluedog/Plane_Analytics_Dark_Mode.png?updatedAt=1684944596824"
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_analytics_dark_mode.webp"
alt="Plane Analytics"
width="100%"
/>
@@ -122,7 +122,7 @@ docker compose up -d
<p>
<a href="https://plane.so" target="_blank">
<img
src="https://ik.imagekit.io/killbluedog/Plane_Pages_Dark_Mode.png?updatedAt=1684943050202"
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_pages_dark_mode.webp"
alt="Plane Pages"
width="100%"
/>
@@ -132,7 +132,7 @@ docker compose up -d
<p>
<a href="https://plane.so" target="_blank">
<img
src="https://ik.imagekit.io/killbluedog/Plane_Commad_K_Dark_Mode.png?updatedAt=1684943050312"
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_commad_k_dark_mode.webp"
alt="Plane Command Menu"
width="100%"
/>
@@ -140,7 +140,6 @@ docker compose up -d
</p>
</p>

## 📚 Documentation

For full documentation, visit [docs.plane.so](https://docs.plane.so/)
75 apiserver/.env.example (Normal file)

@@ -0,0 +1,75 @@
# Backend
# Debug value for the API server; use 0 for production
DEBUG=0
CORS_ALLOWED_ORIGINS=""

# Error logs
SENTRY_DSN=""

# Database Settings
PGUSER="plane"
PGPASSWORD="plane"
PGHOST="plane-db"
PGDATABASE="plane"
DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}

# Redis Settings
REDIS_HOST="plane-redis"
REDIS_PORT="6379"
REDIS_URL="redis://${REDIS_HOST}:6379/"

# Email Settings
EMAIL_HOST=""
EMAIL_HOST_USER=""
EMAIL_HOST_PASSWORD=""
EMAIL_PORT=587
EMAIL_FROM="Team Plane <team@mailer.plane.so>"
EMAIL_USE_TLS="1"
EMAIL_USE_SSL="0"

# AWS Settings
AWS_REGION=""
AWS_ACCESS_KEY_ID="access-key"
AWS_SECRET_ACCESS_KEY="secret-key"
AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
# Changing this requires a matching change in nginx.conf for uploads if using the minio setup
AWS_S3_BUCKET_NAME="uploads"
# Maximum file upload limit
FILE_SIZE_LIMIT=5242880

# GPT settings
OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
OPENAI_API_KEY="sk-" # deprecated
GPT_ENGINE="gpt-3.5-turbo" # deprecated

# Github
GITHUB_CLIENT_SECRET="" # For fetching release notes

# Settings related to Docker
DOCKERIZED=1
# Set to 1 if using the pre-configured minio setup
USE_MINIO=1

# Nginx Configuration
NGINX_PORT=80

# Default Creds
DEFAULT_EMAIL="captain@plane.so"
DEFAULT_PASSWORD="password123"

# SignUps
ENABLE_SIGNUP="1"

# Enable Email/Password Signup
ENABLE_EMAIL_PASSWORD="1"

# Enable Magic link Login
ENABLE_MAGIC_LINK_LOGIN="0"

# Email redirections and minio domain settings
WEB_URL="http://localhost"

# Gunicorn Workers
GUNICORN_WORKERS=2
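The block of settings above maps almost one-to-one onto environment lookups in the Django backend. As a rough sketch of that consumption (assumed layout and defaults; Plane's real settings modules live under `apiserver/plane/settings/` and may differ):

```python
# Minimal sketch (assumption, not Plane's actual settings file): how a
# Django settings module could read the .env keys defined above.
import os

DEBUG = int(os.environ.get("DEBUG", 0)) == 1

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.postgresql",
        "USER": os.environ.get("PGUSER", "plane"),
        "PASSWORD": os.environ.get("PGPASSWORD", "plane"),
        "HOST": os.environ.get("PGHOST", "plane-db"),
        "NAME": os.environ.get("PGDATABASE", "plane"),
    }
}

REDIS_URL = os.environ.get("REDIS_URL", "redis://plane-redis:6379/")
# 5242880 bytes = 5 MiB default upload cap
FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880))
```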
@@ -49,7 +49,7 @@ USER root
RUN apk --no-cache add "bash~=5.2"
COPY ./bin ./bin/

RUN chmod +x ./bin/takeoff ./bin/worker
RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
RUN chmod -R 777 /code

USER captain
52 apiserver/Dockerfile.dev (Normal file)

@@ -0,0 +1,52 @@
FROM python:3.11.1-alpine3.17 AS backend

# set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1

RUN apk --no-cache add \
    "bash~=5.2" \
    "libpq~=15" \
    "libxslt~=1.1" \
    "nodejs-current~=19" \
    "xmlsec~=1.2" \
    "libffi-dev" \
    "bash~=5.2" \
    "g++~=12.2" \
    "gcc~=12.2" \
    "cargo~=1.64" \
    "git~=2" \
    "make~=4.3" \
    "postgresql13-dev~=13" \
    "libc-dev" \
    "linux-headers"

WORKDIR /code

COPY requirements.txt ./requirements.txt
ADD requirements ./requirements

RUN pip install -r requirements.txt --compile --no-cache-dir

RUN addgroup -S plane && \
    adduser -S captain -G plane

RUN chown captain.plane /code

USER captain

# Add in Django deps and generate Django's static files

USER root

# RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
RUN chmod -R 777 /code

USER captain

# Expose container port and run entry point script
EXPOSE 8000

# CMD [ "./bin/takeoff" ]
@@ -1,3 +1,3 @@
web: gunicorn -w 4 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:$PORT --config gunicorn.config.py --max-requests 10000 --max-requests-jitter 1000 --access-logfile -
web: gunicorn -w 4 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:$PORT --max-requests 10000 --max-requests-jitter 1000 --access-logfile -
worker: celery -A plane worker -l info
beat: celery -A plane beat -l INFO
5 apiserver/bin/beat (Normal file)

@@ -0,0 +1,5 @@
#!/bin/bash
set -e

python manage.py wait_for_db
celery -A plane beat -l info
83 apiserver/bin/bucket_script.py (Normal file)

@@ -0,0 +1,83 @@
import os, sys
import boto3
import json
from botocore.exceptions import ClientError


sys.path.append("/code")

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")
import django

django.setup()


def set_bucket_public_policy(s3_client, bucket_name):
    public_policy = {
        "Version": "2012-10-17",
        "Statement": [{
            "Effect": "Allow",
            "Principal": "*",
            "Action": ["s3:GetObject"],
            "Resource": [f"arn:aws:s3:::{bucket_name}/*"]
        }]
    }

    try:
        s3_client.put_bucket_policy(
            Bucket=bucket_name,
            Policy=json.dumps(public_policy)
        )
        print(f"Public read access policy set for bucket '{bucket_name}'.")
    except ClientError as e:
        print(f"Error setting public read access policy: {e}")


def create_bucket():
    try:
        from django.conf import settings

        # Create a session using the credentials from Django settings
        session = boto3.session.Session(
            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
        )

        # Create an S3 client using the session
        s3_client = session.client('s3', endpoint_url=settings.AWS_S3_ENDPOINT_URL)
        bucket_name = settings.AWS_STORAGE_BUCKET_NAME

        print("Checking bucket...")

        # Check if the bucket exists
        s3_client.head_bucket(Bucket=bucket_name)

        # If head_bucket does not raise an exception, the bucket exists
        print(f"Bucket '{bucket_name}' already exists.")

        set_bucket_public_policy(s3_client, bucket_name)

    except ClientError as e:
        error_code = int(e.response['Error']['Code'])
        bucket_name = settings.AWS_STORAGE_BUCKET_NAME
        if error_code == 404:
            # Bucket does not exist, create it
            print(f"Bucket '{bucket_name}' does not exist. Creating bucket...")
            try:
                s3_client.create_bucket(Bucket=bucket_name)
                print(f"Bucket '{bucket_name}' created successfully.")
                set_bucket_public_policy(s3_client, bucket_name)
            except ClientError as create_error:
                print(f"Failed to create bucket: {create_error}")
        elif error_code == 403:
            # Access to the bucket is forbidden
            print(f"Access to the bucket '{bucket_name}' is forbidden. Check permissions.")
        else:
            # Another ClientError occurred
            print(f"Failed to check bucket: {e}")
    except Exception as ex:
        # Handle any other exception
        print(f"An error occurred: {ex}")


if __name__ == "__main__":
    create_bucket()
@@ -5,5 +5,7 @@ python manage.py migrate

# Create a Default User
python bin/user_script.py
# Create the default bucket
python bin/bucket_script.py

exec gunicorn -w 8 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --config gunicorn.config.py --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
exec gunicorn -w $GUNICORN_WORKERS -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
@@ -19,9 +19,9 @@ def populate():
        user = User.objects.create(email=default_email, username=uuid.uuid4().hex)
        user.set_password(default_password)
        user.save()
        print("User created")

        print("Success")
        print(f"User created with an email: {default_email}")
    else:
        print(f"User already exists with the default email: {default_email}")


if __name__ == "__main__":
@@ -3,4 +3,4 @@ from psycogreen.gevent import patch_psycopg

def post_fork(server, worker):
    patch_psycopg()
    worker.log.info("Made Psycopg2 Green")
    worker.log.info("Made Psycopg2 Green")
@@ -1,2 +1,17 @@
from .workspace import WorkSpaceBasePermission, WorkSpaceAdminPermission, WorkspaceEntityPermission
from .project import ProjectBasePermission, ProjectEntityPermission, ProjectMemberPermission, ProjectLitePermission
from .workspace import (
    WorkSpaceBasePermission,
    WorkspaceOwnerPermission,
    WorkSpaceAdminPermission,
    WorkspaceEntityPermission,
    WorkspaceViewerPermission,
    WorkspaceUserPermission,
)
from .project import (
    ProjectBasePermission,
    ProjectEntityPermission,
    ProjectMemberPermission,
    ProjectLitePermission,
)
@@ -13,14 +13,15 @@ Guest = 5

class ProjectBasePermission(BasePermission):
    def has_permission(self, request, view):

        if request.user.is_anonymous:
            return False

        ## Safe Methods -> Handle the filtering logic in queryset
        if request.method in SAFE_METHODS:
            return WorkspaceMember.objects.filter(
                workspace__slug=view.workspace_slug, member=request.user
                workspace__slug=view.workspace_slug,
                member=request.user,
                is_active=True,
            ).exists()

        ## Only workspace owners or admins can create the projects
@@ -29,6 +30,7 @@ class ProjectBasePermission(BasePermission):
                workspace__slug=view.workspace_slug,
                member=request.user,
                role__in=[Admin, Member],
                is_active=True,
            ).exists()

        ## Only Project Admins can update project attributes
@@ -37,19 +39,21 @@ class ProjectBasePermission(BasePermission):
            member=request.user,
            role=Admin,
            project_id=view.project_id,
            is_active=True,
        ).exists()


class ProjectMemberPermission(BasePermission):
    def has_permission(self, request, view):

        if request.user.is_anonymous:
            return False

        ## Safe Methods -> Handle the filtering logic in queryset
        if request.method in SAFE_METHODS:
            return ProjectMember.objects.filter(
                workspace__slug=view.workspace_slug, member=request.user
                workspace__slug=view.workspace_slug,
                member=request.user,
                is_active=True,
            ).exists()
        ## Only workspace owners or admins can create the projects
        if request.method == "POST":
@@ -57,6 +61,7 @@ class ProjectMemberPermission(BasePermission):
                workspace__slug=view.workspace_slug,
                member=request.user,
                role__in=[Admin, Member],
                is_active=True,
            ).exists()

        ## Only Project Admins can update project attributes
@@ -65,12 +70,12 @@ class ProjectMemberPermission(BasePermission):
            member=request.user,
            role__in=[Admin, Member],
            project_id=view.project_id,
            is_active=True,
        ).exists()


class ProjectEntityPermission(BasePermission):
    def has_permission(self, request, view):

        if request.user.is_anonymous:
            return False

@@ -80,6 +85,7 @@ class ProjectEntityPermission(BasePermission):
                workspace__slug=view.workspace_slug,
                member=request.user,
                project_id=view.project_id,
                is_active=True,
            ).exists()

        ## Only project members or admins can create and edit the project attributes
@@ -88,17 +94,18 @@ class ProjectEntityPermission(BasePermission):
            member=request.user,
            role__in=[Admin, Member],
            project_id=view.project_id,
            is_active=True,
        ).exists()


class ProjectLitePermission(BasePermission):

    def has_permission(self, request, view):
        if request.user.is_anonymous:
            return False

        return ProjectMember.objects.filter(
            workspace__slug=view.workspace_slug,
            member=request.user,
            project_id=view.project_id,
        ).exists()
            is_active=True,
        ).exists()
@@ -32,15 +32,31 @@ class WorkSpaceBasePermission(BasePermission):
            member=request.user,
            workspace__slug=view.workspace_slug,
            role__in=[Owner, Admin],
            is_active=True,
        ).exists()

        # allow only owner to delete the workspace
        if request.method == "DELETE":
            return WorkspaceMember.objects.filter(
                member=request.user, workspace__slug=view.workspace_slug, role=Owner
                member=request.user,
                workspace__slug=view.workspace_slug,
                role=Owner,
                is_active=True,
            ).exists()


class WorkspaceOwnerPermission(BasePermission):
    def has_permission(self, request, view):
        if request.user.is_anonymous:
            return False

        return WorkspaceMember.objects.filter(
            workspace__slug=view.workspace_slug,
            member=request.user,
            role=Owner,
        ).exists()


class WorkSpaceAdminPermission(BasePermission):
    def has_permission(self, request, view):
        if request.user.is_anonymous:
@@ -50,6 +66,7 @@ class WorkSpaceAdminPermission(BasePermission):
            member=request.user,
            workspace__slug=view.workspace_slug,
            role__in=[Owner, Admin],
            is_active=True,
        ).exists()

@@ -58,6 +75,42 @@ class WorkspaceEntityPermission(BasePermission):
        if request.user.is_anonymous:
            return False

        ## Safe Methods -> Handle the filtering logic in queryset
        if request.method in SAFE_METHODS:
            return WorkspaceMember.objects.filter(
                workspace__slug=view.workspace_slug,
                member=request.user,
                is_active=True,
            ).exists()

        return WorkspaceMember.objects.filter(
            member=request.user, workspace__slug=view.workspace_slug
            member=request.user,
            workspace__slug=view.workspace_slug,
            role__in=[Owner, Admin],
            is_active=True,
        ).exists()


class WorkspaceViewerPermission(BasePermission):
    def has_permission(self, request, view):
        if request.user.is_anonymous:
            return False

        return WorkspaceMember.objects.filter(
            member=request.user,
            workspace__slug=view.workspace_slug,
            role__gte=10,
            is_active=True,
        ).exists()


class WorkspaceUserPermission(BasePermission):
    def has_permission(self, request, view):
        if request.user.is_anonymous:
            return False

        return WorkspaceMember.objects.filter(
            member=request.user,
            workspace__slug=view.workspace_slug,
            is_active=True,
        ).exists()
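All of these permission classes follow one pattern: deny anonymous users, then gate on a membership row that is still `is_active`, with `role__gte=10` admitting members whose role value is at or above 10. A minimal sketch of how a DRF view might wire one up (the view class and URL kwarg here are hypothetical, not part of this diff):

```python
# Illustrative only (assumed view, not from this compare): the permission
# classes above read view.workspace_slug, so the view exposes it as a
# property derived from the URL kwargs, which DRF sets before permission
# checks run in dispatch().
from rest_framework.views import APIView
from rest_framework.response import Response

class WorkspaceIssueListView(APIView):
    permission_classes = [WorkspaceViewerPermission]

    @property
    def workspace_slug(self):
        # e.g. routed as path("workspaces/<str:slug>/issues/", ...)
        return self.kwargs.get("slug")

    def get(self, request, slug):
        return Response({"results": []})
```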
@@ -1,10 +1,13 @@
from .base import BaseSerializer
from .people import (
from .user import (
    UserSerializer,
    UserLiteSerializer,
    ChangePasswordSerializer,
    ResetPasswordSerializer,
    TokenSerializer,
    UserAdminLiteSerializer,
    UserMeSerializer,
    UserMeSettingsSerializer,
)
from .user import UserSerializer, UserLiteSerializer
from .workspace import (
    WorkSpaceSerializer,
    WorkSpaceMemberSerializer,
@@ -12,9 +15,12 @@ from .workspace import (
    WorkSpaceMemberInviteSerializer,
    WorkspaceLiteSerializer,
    WorkspaceThemeSerializer,
    WorkspaceMemberAdminSerializer,
    WorkspaceMemberMeSerializer,
)
from .project import (
    ProjectSerializer,
    ProjectListSerializer,
    ProjectDetailSerializer,
    ProjectMemberSerializer,
    ProjectMemberInviteSerializer,
@@ -22,18 +28,24 @@ from .project import (
    ProjectFavoriteSerializer,
    ProjectLiteSerializer,
    ProjectMemberLiteSerializer,
    ProjectDeployBoardSerializer,
    ProjectMemberAdminSerializer,
    ProjectPublicMemberSerializer,
)
from .state import StateSerializer, StateLiteSerializer
from .view import IssueViewSerializer, IssueViewFavoriteSerializer
from .cycle import CycleSerializer, CycleIssueSerializer, CycleFavoriteSerializer
from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer
from .cycle import (
    CycleSerializer,
    CycleIssueSerializer,
    CycleFavoriteSerializer,
    CycleWriteSerializer,
)
from .asset import FileAssetSerializer
from .issue import (
    IssueCreateSerializer,
    IssueActivitySerializer,
    IssueCommentSerializer,
    IssuePropertySerializer,
    BlockerIssueSerializer,
    BlockedIssueSerializer,
    IssueAssigneeSerializer,
    LabelSerializer,
    IssueSerializer,
@@ -43,6 +55,12 @@ from .issue import (
    IssueLiteSerializer,
    IssueAttachmentSerializer,
    IssueSubscriberSerializer,
    IssueReactionSerializer,
    CommentReactionSerializer,
    IssueVoteSerializer,
    IssueRelationSerializer,
    RelatedIssueSerializer,
    IssuePublicSerializer,
)

from .module import (
@@ -53,7 +71,7 @@ from .module import (
    ModuleFavoriteSerializer,
)

from .api_token import APITokenSerializer
from .api import APITokenSerializer, APITokenReadSerializer

from .integration import (
    IntegrationSerializer,
@@ -80,3 +98,7 @@ from .inbox import InboxSerializer, InboxIssueSerializer, IssueStateInboxSeriali
from .analytic import AnalyticViewSerializer

from .notification import NotificationSerializer

from .exporter import ExporterHistorySerializer

from .webhook import WebhookSerializer, WebhookLogSerializer

@@ -17,7 +17,7 @@ class AnalyticViewSerializer(BaseSerializer):
        if bool(query_params):
            validated_data["query"] = issue_filters(query_params, "POST")
        else:
            validated_data["query"] = dict()
            validated_data["query"] = {}
        return AnalyticView.objects.create(**validated_data)

    def update(self, instance, validated_data):
@@ -25,6 +25,6 @@ class AnalyticViewSerializer(BaseSerializer):
        if bool(query_params):
            validated_data["query"] = issue_filters(query_params, "POST")
        else:
            validated_data["query"] = dict()
            validated_data["query"] = {}
        validated_data["query"] = issue_filters(query_params, "PATCH")
        return super().update(instance, validated_data)
31 apiserver/plane/api/serializers/api.py (Normal file)

@@ -0,0 +1,31 @@
from .base import BaseSerializer
from plane.db.models import APIToken, APIActivityLog


class APITokenSerializer(BaseSerializer):

    class Meta:
        model = APIToken
        fields = "__all__"
        read_only_fields = [
            "token",
            "expired_at",
            "created_at",
            "updated_at",
            "workspace",
            "user",
        ]


class APITokenReadSerializer(BaseSerializer):

    class Meta:
        model = APIToken
        exclude = ('token',)


class APIActivityLogSerializer(BaseSerializer):

    class Meta:
        model = APIActivityLog
        fields = "__all__"
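The split into two serializers keeps the secret out of read paths: `APITokenSerializer` still emits the `token` field (read-only), while `APITokenReadSerializer` excludes it entirely. A sketch of the intended usage (an assumption about how the views consume these classes, not code from this compare):

```python
# Hypothetical helper: hand the secret back exactly once, at creation time,
# and use the read serializer everywhere else (list/detail endpoints).
def serialize_api_token(api_token, just_created: bool) -> dict:
    if just_created:
        return APITokenSerializer(api_token).data   # includes "token"
    return APITokenReadSerializer(api_token).data   # "token" excluded
```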
@@ -1,14 +0,0 @@
from .base import BaseSerializer
from plane.db.models import APIToken


class APITokenSerializer(BaseSerializer):
    class Meta:
        model = APIToken
        fields = [
            "label",
            "user",
            "user_type",
            "workspace",
            "created_at",
        ]
@@ -3,3 +3,56 @@ from rest_framework import serializers

class BaseSerializer(serializers.ModelSerializer):
    id = serializers.PrimaryKeyRelatedField(read_only=True)

class DynamicBaseSerializer(BaseSerializer):

    def __init__(self, *args, **kwargs):
        # If 'fields' is provided in the arguments, remove it and store it separately.
        # This is done so as not to pass this custom argument up to the superclass.
        fields = kwargs.pop("fields", None)

        # Call the initialization of the superclass.
        super().__init__(*args, **kwargs)

        # If 'fields' was provided, filter the fields of the serializer accordingly.
        if fields is not None:
            self.fields = self._filter_fields(fields)

    def _filter_fields(self, fields):
        """
        Adjust the serializer's fields based on the provided 'fields' list.

        :param fields: List or dictionary specifying which fields to include in the serializer.
        :return: The updated fields for the serializer.
        """
        # Check each field_name in the provided fields.
        for field_name in fields:
            # If the field is a dictionary (indicating nested fields),
            # loop through its keys and values.
            if isinstance(field_name, dict):
                for key, value in field_name.items():
                    # If the value of this nested field is a list,
                    # perform a recursive filter on it.
                    if isinstance(value, list):
                        self._filter_fields(self.fields[key], value)

        # Create a list to store allowed fields.
        allowed = []
        for item in fields:
            # If the item is a string, it directly represents a field's name.
            if isinstance(item, str):
                allowed.append(item)
            # If the item is a dictionary, it represents a nested field.
            # Add the key of this dictionary to the allowed list.
            elif isinstance(item, dict):
                allowed.append(list(item.keys())[0])

        # Convert the current serializer's fields and the allowed fields to sets.
        existing = set(self.fields)
        allowed = set(allowed)

        # Remove fields from the serializer that aren't in the 'allowed' list.
        for field_name in (existing - allowed):
            self.fields.pop(field_name)

        return self.fields
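`DynamicBaseSerializer` lets a caller shrink the payload by passing a `fields` kwarg, which is popped before DRF's own `__init__` sees it; with `many=True`, DRF forwards the kwarg to the child serializer, so the same trick works for lists. A usage sketch (illustrative; it assumes a subclass such as `IssueLiteSerializer`, which this compare does rebase onto `DynamicBaseSerializer` further down):

```python
# Usage sketch: trim a list response down to three top-level fields.
# Any field name not in the list is dropped from the serialized output.
serializer = IssueLiteSerializer(
    issues,                       # a queryset or list of Issue instances
    many=True,
    fields=["id", "name", "priority"],
)
payload = serializer.data         # each item carries only id, name, priority
```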
@@ -1,6 +1,3 @@
# Django imports
from django.db.models.functions import TruncDate

# Third party imports
from rest_framework import serializers

@@ -13,6 +10,21 @@ from .project import ProjectLiteSerializer
from plane.db.models import Cycle, CycleIssue, CycleFavorite


class CycleWriteSerializer(BaseSerializer):
    def validate(self, data):
        if (
            data.get("start_date", None) is not None
            and data.get("end_date", None) is not None
            and data.get("start_date", None) > data.get("end_date", None)
        ):
            raise serializers.ValidationError("Start date cannot exceed end date")
        return data

    class Meta:
        model = Cycle
        fields = "__all__"


class CycleSerializer(BaseSerializer):
    owned_by = UserLiteSerializer(read_only=True)
    is_favorite = serializers.BooleanField(read_only=True)
@@ -23,21 +35,31 @@ class CycleSerializer(BaseSerializer):
    unstarted_issues = serializers.IntegerField(read_only=True)
    backlog_issues = serializers.IntegerField(read_only=True)
    assignees = serializers.SerializerMethodField(read_only=True)
    labels = serializers.SerializerMethodField(read_only=True)
    total_estimates = serializers.IntegerField(read_only=True)
    completed_estimates = serializers.IntegerField(read_only=True)
    started_estimates = serializers.IntegerField(read_only=True)
    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")

    def validate(self, data):
        if (
            data.get("start_date", None) is not None
            and data.get("end_date", None) is not None
            and data.get("start_date", None) > data.get("end_date", None)
        ):
            raise serializers.ValidationError("Start date cannot exceed end date")
        return data

    def get_assignees(self, obj):
        members = [
            {
                "avatar": assignee.avatar,
                "first_name": assignee.first_name,
                "display_name": assignee.display_name,
                "id": assignee.id,
            }
            for issue_cycle in obj.issue_cycle.all()
            for issue_cycle in obj.issue_cycle.prefetch_related(
                "issue__assignees"
            ).all()
            for assignee in issue_cycle.issue.assignees.all()
        ]
        # Use a set comprehension to return only the unique objects
@@ -47,24 +69,6 @@ class CycleSerializer(BaseSerializer):
        unique_list = [dict(item) for item in unique_objects]

        return unique_list

    def get_labels(self, obj):
        labels = [
            {
                "name": label.name,
                "color": label.color,
                "id": label.id,
            }
            for issue_cycle in obj.issue_cycle.all()
            for label in issue_cycle.issue.labels.all()
        ]
        # Use a set comprehension to return only the unique objects
        unique_objects = {frozenset(item.items()) for item in labels}

        # Convert the set back to a list of dictionaries
        unique_list = [dict(item) for item in unique_objects]

        return unique_list

    class Meta:
        model = Cycle
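The same start/end guard now appears in both `CycleWriteSerializer` and `CycleSerializer`: it only fires when both dates are present, and rejects a range whose start is after its end. The snippet below restates the pattern as a self-contained serializer so the behavior is easy to see; it is an illustration, not code from this compare:

```python
# Standalone restatement of the date-range guard used above.
from rest_framework import serializers

class DateRangeSerializer(serializers.Serializer):
    start_date = serializers.DateField(required=False, allow_null=True)
    end_date = serializers.DateField(required=False, allow_null=True)

    def validate(self, data):
        # Comparison only runs when both ends of the range were supplied.
        if (
            data.get("start_date") is not None
            and data.get("end_date") is not None
            and data.get("start_date") > data.get("end_date")
        ):
            raise serializers.ValidationError("Start date cannot exceed end date")
        return data

s = DateRangeSerializer(data={"start_date": "2023-09-15", "end_date": "2023-09-01"})
assert not s.is_valid()   # rejected: start is after end
```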
26 apiserver/plane/api/serializers/exporter.py (Normal file)

@@ -0,0 +1,26 @@
# Module imports
from .base import BaseSerializer
from plane.db.models import ExporterHistory
from .user import UserLiteSerializer


class ExporterHistorySerializer(BaseSerializer):
    initiated_by_detail = UserLiteSerializer(source="initiated_by", read_only=True)

    class Meta:
        model = ExporterHistory
        fields = [
            "id",
            "created_at",
            "updated_at",
            "project",
            "provider",
            "status",
            "url",
            "initiated_by",
            "initiated_by_detail",
            "token",
            "created_by",
            "updated_by",
        ]
        read_only_fields = fields
@@ -6,7 +6,6 @@ from .base import BaseSerializer
from .issue import IssueFlatSerializer, LabelLiteSerializer
from .project import ProjectLiteSerializer
from .state import StateLiteSerializer
from .project import ProjectLiteSerializer
from .user import UserLiteSerializer
from plane.db.models import Inbox, InboxIssue, Issue
@@ -5,4 +5,4 @@ from .github import (
    GithubIssueSyncSerializer,
    GithubCommentSyncSerializer,
)
from .slack import SlackProjectSyncSerializer
from .slack import SlackProjectSyncSerializer
@@ -5,11 +5,10 @@ from django.utils import timezone
from rest_framework import serializers

# Module imports
from .base import BaseSerializer
from .base import BaseSerializer, DynamicBaseSerializer
from .user import UserLiteSerializer
from .state import StateSerializer, StateLiteSerializer
from .user import UserLiteSerializer
from .project import ProjectSerializer, ProjectLiteSerializer
from .project import ProjectLiteSerializer
from .workspace import WorkspaceLiteSerializer
from plane.db.models import (
    User,
@@ -17,18 +16,20 @@ from plane.db.models import (
    IssueActivity,
    IssueComment,
    IssueProperty,
    IssueBlocker,
    IssueAssignee,
    IssueSubscriber,
    IssueLabel,
    Label,
    IssueBlocker,
    CycleIssue,
    Cycle,
    Module,
    ModuleIssue,
    IssueLink,
    IssueAttachment,
    IssueReaction,
    CommentReaction,
    IssueVote,
    IssueRelation,
)

@@ -47,9 +48,24 @@ class IssueFlatSerializer(BaseSerializer):
            "target_date",
            "sequence_id",
            "sort_order",
            "is_draft",
        ]


class IssueProjectLiteSerializer(BaseSerializer):
    project_detail = ProjectLiteSerializer(source="project", read_only=True)

    class Meta:
        model = Issue
        fields = [
            "id",
            "project_detail",
            "name",
            "sequence_id",
        ]
        read_only_fields = fields


##TODO: Find a better way to write this serializer
## Find a better approach to save manytomany?
class IssueCreateSerializer(BaseSerializer):
@@ -58,31 +74,18 @@ class IssueCreateSerializer(BaseSerializer):
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")

    assignees_list = serializers.ListField(
    assignees = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
        write_only=True,
        required=False,
    )

    # List of issues that are blocking this issue
    blockers_list = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=Issue.objects.all()),
        write_only=True,
        required=False,
    )
    labels_list = serializers.ListField(
    labels = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
        write_only=True,
        required=False,
    )

    # List of issues that are blocked by this issue
    blocks_list = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=Issue.objects.all()),
        write_only=True,
        required=False,
    )

    class Meta:
        model = Issue
        fields = "__all__"
@@ -95,30 +98,34 @@ class IssueCreateSerializer(BaseSerializer):
            "updated_at",
        ]

    def to_representation(self, instance):
        data = super().to_representation(instance)
        data['assignees'] = [str(assignee.id) for assignee in instance.assignees.all()]
        data['labels'] = [str(label.id) for label in instance.labels.all()]
        return data

    def validate(self, data):
        if (
            data.get("start_date", None) is not None
            and data.get("target_date", None) is not None
            and data.get("start_date", None) > data.get("target_date", None)
        ):
            raise serializers.ValidationError("Start date cannot exceed target date")
        return data

    def create(self, validated_data):
        blockers = validated_data.pop("blockers_list", None)
        assignees = validated_data.pop("assignees_list", None)
        labels = validated_data.pop("labels_list", None)
        blocks = validated_data.pop("blocks_list", None)
        assignees = validated_data.pop("assignees", None)
        labels = validated_data.pop("labels", None)

        project = self.context["project"]
        issue = Issue.objects.create(**validated_data, project=project)
        project_id = self.context["project_id"]
        workspace_id = self.context["workspace_id"]
        default_assignee_id = self.context["default_assignee_id"]

        if blockers is not None and len(blockers):
            IssueBlocker.objects.bulk_create(
                [
                    IssueBlocker(
                        block=issue,
                        blocked_by=blocker,
                        project=project,
                        workspace=project.workspace,
                        created_by=issue.created_by,
                        updated_by=issue.updated_by,
                    )
                    for blocker in blockers
                ],
                batch_size=10,
            )
        issue = Issue.objects.create(**validated_data, project_id=project_id)

        # Issue Audit Users
        created_by_id = issue.created_by_id
        updated_by_id = issue.updated_by_id

        if assignees is not None and len(assignees):
            IssueAssignee.objects.bulk_create(
@@ -126,10 +133,10 @@ class IssueCreateSerializer(BaseSerializer):
                    IssueAssignee(
                        assignee=user,
                        issue=issue,
                        project=project,
                        workspace=project.workspace,
                        created_by=issue.created_by,
                        updated_by=issue.updated_by,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for user in assignees
                ],
@@ -137,14 +144,14 @@ class IssueCreateSerializer(BaseSerializer):
            )
        else:
            # Then assign it to default assignee
            if project.default_assignee is not None:
            if default_assignee_id is not None:
                IssueAssignee.objects.create(
                    assignee=project.default_assignee,
                    assignee_id=default_assignee_id,
                    issue=issue,
                    project=project,
                    workspace=project.workspace,
                    created_by=issue.created_by,
                    updated_by=issue.updated_by,
                    project_id=project_id,
                    workspace_id=workspace_id,
                    created_by_id=created_by_id,
                    updated_by_id=updated_by_id,
                )

        if labels is not None and len(labels):
@@ -153,56 +160,27 @@ class IssueCreateSerializer(BaseSerializer):
                    IssueLabel(
                        label=label,
                        issue=issue,
                        project=project,
                        workspace=project.workspace,
                        created_by=issue.created_by,
                        updated_by=issue.updated_by,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for label in labels
                ],
                batch_size=10,
            )

        if blocks is not None and len(blocks):
            IssueBlocker.objects.bulk_create(
                [
                    IssueBlocker(
                        block=block,
                        blocked_by=issue,
                        project=project,
                        workspace=project.workspace,
                        created_by=issue.created_by,
                        updated_by=issue.updated_by,
                    )
                    for block in blocks
                ],
                batch_size=10,
            )

        return issue

    def update(self, instance, validated_data):
        blockers = validated_data.pop("blockers_list", None)
        assignees = validated_data.pop("assignees_list", None)
        labels = validated_data.pop("labels_list", None)
        blocks = validated_data.pop("blocks_list", None)
        assignees = validated_data.pop("assignees", None)
        labels = validated_data.pop("labels", None)

        if blockers is not None:
            IssueBlocker.objects.filter(block=instance).delete()
            IssueBlocker.objects.bulk_create(
                [
                    IssueBlocker(
                        block=instance,
                        blocked_by=blocker,
                        project=instance.project,
                        workspace=instance.project.workspace,
                        created_by=instance.created_by,
                        updated_by=instance.updated_by,
                    )
                    for blocker in blockers
                ],
                batch_size=10,
            )
        # Related models
        project_id = instance.project_id
        workspace_id = instance.workspace_id
        created_by_id = instance.created_by_id
        updated_by_id = instance.updated_by_id

        if assignees is not None:
            IssueAssignee.objects.filter(issue=instance).delete()
@@ -211,10 +189,10 @@ class IssueCreateSerializer(BaseSerializer):
                    IssueAssignee(
                        assignee=user,
                        issue=instance,
                        project=instance.project,
                        workspace=instance.project.workspace,
                        created_by=instance.created_by,
                        updated_by=instance.updated_by,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for user in assignees
                ],
@@ -228,65 +206,31 @@ class IssueCreateSerializer(BaseSerializer):
                    IssueLabel(
                        label=label,
                        issue=instance,
                        project=instance.project,
                        workspace=instance.project.workspace,
                        created_by=instance.created_by,
                        updated_by=instance.updated_by,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for label in labels
                ],
                batch_size=10,
            )

        if blocks is not None:
            IssueBlocker.objects.filter(blocked_by=instance).delete()
            IssueBlocker.objects.bulk_create(
                [
                    IssueBlocker(
                        block=block,
                        blocked_by=instance,
                        project=instance.project,
                        workspace=instance.project.workspace,
                        created_by=instance.created_by,
                        updated_by=instance.updated_by,
                    )
                    for block in blocks
                ],
                batch_size=10,
            )

        # The updated_at timestamp changes even when only related models are updated
        instance.updated_at = timezone.now()
        return super().update(instance, validated_data)


class IssueActivitySerializer(BaseSerializer):
    actor_detail = UserLiteSerializer(read_only=True, source="actor")
    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
    issue_detail = IssueFlatSerializer(read_only=True, source="issue")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")

    class Meta:
        model = IssueActivity
        fields = "__all__"


class IssueCommentSerializer(BaseSerializer):
    actor_detail = UserLiteSerializer(read_only=True, source="actor")
    issue_detail = IssueFlatSerializer(read_only=True, source="issue")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")

    class Meta:
        model = IssueComment
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "issue",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]


class IssuePropertySerializer(BaseSerializer):
    class Meta:
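With this rewrite, `create()` and `update()` no longer walk `project.workspace` or other relations; they expect plain ids in the serializer context and assign `*_id` attributes directly, which avoids extra queries per save. A hypothetical call site (an assumption about the consuming view, not part of this compare):

```python
# Illustrative view fragment (assumed names): the ids are resolved once in
# the view and handed to the serializer via context.
serializer = IssueCreateSerializer(
    data=request.data,
    context={
        "project_id": project_id,                    # from the URL kwargs
        "workspace_id": workspace_id,                # resolved once per request
        "default_assignee_id": default_assignee_id,  # may be None
    },
)
if serializer.is_valid():
    issue = serializer.save()  # bulk-creates assignees/labels in batches of 10
```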
@@ -323,7 +267,6 @@ class LabelLiteSerializer(BaseSerializer):


class IssueLabelSerializer(BaseSerializer):
    # label_details = LabelSerializer(read_only=True, source="label")

    class Meta:
        model = IssueLabel
@@ -334,20 +277,39 @@ class IssueLabelSerializer(BaseSerializer):
        ]


class BlockedIssueSerializer(BaseSerializer):
    blocked_issue_detail = IssueFlatSerializer(source="block", read_only=True)
class IssueRelationSerializer(BaseSerializer):
    issue_detail = IssueProjectLiteSerializer(read_only=True, source="related_issue")

    class Meta:
        model = IssueBlocker
        fields = "__all__"
        model = IssueRelation
        fields = [
            "issue_detail",
            "relation_type",
            "related_issue",
            "issue",
            "id"
        ]
        read_only_fields = [
            "workspace",
            "project",
        ]


class BlockerIssueSerializer(BaseSerializer):
    blocker_issue_detail = IssueFlatSerializer(source="blocked_by", read_only=True)
class RelatedIssueSerializer(BaseSerializer):
    issue_detail = IssueProjectLiteSerializer(read_only=True, source="issue")

    class Meta:
        model = IssueBlocker
        fields = "__all__"
        model = IssueRelation
        fields = [
            "issue_detail",
            "relation_type",
            "related_issue",
            "issue",
            "id"
        ]
        read_only_fields = [
            "workspace",
            "project",
        ]


class IssueAssigneeSerializer(BaseSerializer):
@@ -460,11 +422,93 @@ class IssueAttachmentSerializer(BaseSerializer):
        ]


class IssueReactionSerializer(BaseSerializer):

    actor_detail = UserLiteSerializer(read_only=True, source="actor")

    class Meta:
        model = IssueReaction
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "issue",
            "actor",
        ]


class CommentReactionLiteSerializer(BaseSerializer):
    actor_detail = UserLiteSerializer(read_only=True, source="actor")

    class Meta:
        model = CommentReaction
        fields = [
            "id",
            "reaction",
            "comment",
            "actor_detail",
        ]


class CommentReactionSerializer(BaseSerializer):
    class Meta:
        model = CommentReaction
        fields = "__all__"
        read_only_fields = ["workspace", "project", "comment", "actor"]


class IssueVoteSerializer(BaseSerializer):

    actor_detail = UserLiteSerializer(read_only=True, source="actor")

    class Meta:
        model = IssueVote
        fields = ["issue", "vote", "workspace", "project", "actor", "actor_detail"]
        read_only_fields = fields


class IssueCommentSerializer(BaseSerializer):
    actor_detail = UserLiteSerializer(read_only=True, source="actor")
    issue_detail = IssueFlatSerializer(read_only=True, source="issue")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
    comment_reactions = CommentReactionLiteSerializer(read_only=True, many=True)
    is_member = serializers.BooleanField(read_only=True)

    class Meta:
        model = IssueComment
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "issue",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]


class IssueStateFlatSerializer(BaseSerializer):
    state_detail = StateLiteSerializer(read_only=True, source="state")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")

    class Meta:
        model = Issue
        fields = [
            "id",
            "sequence_id",
            "name",
            "state_detail",
            "project_detail",
        ]


# Issue Serializer with state details
class IssueStateSerializer(BaseSerializer):
    state_detail = StateSerializer(read_only=True, source="state")
    project_detail = ProjectSerializer(read_only=True, source="project")
    label_details = LabelSerializer(read_only=True, source="labels", many=True)
    label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
    state_detail = StateLiteSerializer(read_only=True, source="state")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
    sub_issues_count = serializers.IntegerField(read_only=True)
    bridge_id = serializers.UUIDField(read_only=True)
@@ -477,20 +521,19 @@ class IssueStateSerializer(BaseSerializer):


class IssueSerializer(BaseSerializer):
    project_detail = ProjectSerializer(read_only=True, source="project")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    state_detail = StateSerializer(read_only=True, source="state")
    parent_detail = IssueFlatSerializer(read_only=True, source="parent")
    parent_detail = IssueStateFlatSerializer(read_only=True, source="parent")
    label_details = LabelSerializer(read_only=True, source="labels", many=True)
    assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
    # List of issues blocked by this issue
    blocked_issues = BlockedIssueSerializer(read_only=True, many=True)
    # List of issues that block this issue
    blocker_issues = BlockerIssueSerializer(read_only=True, many=True)
    related_issues = IssueRelationSerializer(read_only=True, source="issue_relation", many=True)
    issue_relations = RelatedIssueSerializer(read_only=True, source="issue_related", many=True)
    issue_cycle = IssueCycleDetailSerializer(read_only=True)
    issue_module = IssueModuleDetailSerializer(read_only=True)
    issue_link = IssueLinkSerializer(read_only=True, many=True)
    issue_attachment = IssueAttachmentSerializer(read_only=True, many=True)
    sub_issues_count = serializers.IntegerField(read_only=True)
    issue_reactions = IssueReactionSerializer(read_only=True, many=True)

    class Meta:
        model = Issue
@@ -505,7 +548,7 @@ class IssueSerializer(BaseSerializer):
    ]


class IssueLiteSerializer(BaseSerializer):
class IssueLiteSerializer(DynamicBaseSerializer):
    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    state_detail = StateLiteSerializer(read_only=True, source="state")
@@ -516,6 +559,7 @@ class IssueLiteSerializer(BaseSerializer):
    module_id = serializers.UUIDField(read_only=True)
    attachment_count = serializers.IntegerField(read_only=True)
    link_count = serializers.IntegerField(read_only=True)
    issue_reactions = IssueReactionSerializer(read_only=True, many=True)

    class Meta:
        model = Issue
@@ -533,6 +577,33 @@ class IssueLiteSerializer(BaseSerializer):
    ]


class IssuePublicSerializer(BaseSerializer):
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    state_detail = StateLiteSerializer(read_only=True, source="state")
    reactions = IssueReactionSerializer(read_only=True, many=True, source="issue_reactions")
    votes = IssueVoteSerializer(read_only=True, many=True)

    class Meta:
        model = Issue
        fields = [
            "id",
            "name",
            "description_html",
            "sequence_id",
            "state",
            "state_detail",
            "project",
            "project_detail",
            "workspace",
            "priority",
            "target_date",
            "reactions",
            "votes",
        ]
        read_only_fields = fields


class IssueSubscriberSerializer(BaseSerializer):
    class Meta:
        model = IssueSubscriber
@@ -4,9 +4,8 @@ from rest_framework import serializers

# Module imports
from .base import BaseSerializer
from .user import UserLiteSerializer
from .project import ProjectSerializer, ProjectLiteSerializer
from .project import ProjectLiteSerializer
from .workspace import WorkspaceLiteSerializer
from .issue import IssueStateSerializer

from plane.db.models import (
    User,
@@ -19,7 +18,7 @@ from plane.db.models import (


class ModuleWriteSerializer(BaseSerializer):
    members_list = serializers.ListField(
    members = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
        write_only=True,
        required=False,
@@ -39,9 +38,19 @@ class ModuleWriteSerializer(BaseSerializer):
        "created_at",
        "updated_at",
    ]

    def to_representation(self, instance):
        data = super().to_representation(instance)
        data['members'] = [str(member.id) for member in instance.members.all()]
        return data

    def validate(self, data):
        if data.get("start_date", None) is not None and data.get("target_date", None) is not None and data.get("start_date", None) > data.get("target_date", None):
            raise serializers.ValidationError("Start date cannot exceed target date")
        return data

    def create(self, validated_data):
        members = validated_data.pop("members_list", None)
        members = validated_data.pop("members", None)

        project = self.context["project"]

@@ -67,7 +76,7 @@ class ModuleWriteSerializer(BaseSerializer):
        return module

    def update(self, instance, validated_data):
        members = validated_data.pop("members_list", None)
        members = validated_data.pop("members", None)

        if members is not None:
            ModuleMember.objects.filter(module=instance).delete()
@@ -106,7 +115,7 @@ class ModuleFlatSerializer(BaseSerializer):

class ModuleIssueSerializer(BaseSerializer):
    module_detail = ModuleFlatSerializer(read_only=True, source="module")
    issue_detail = IssueStateSerializer(read_only=True, source="issue")
    issue_detail = ProjectLiteSerializer(read_only=True, source="issue")
    sub_issues_count = serializers.IntegerField(read_only=True)

    class Meta:
@@ -151,7 +160,7 @@ class ModuleLinkSerializer(BaseSerializer):


class ModuleSerializer(BaseSerializer):
    project_detail = ProjectSerializer(read_only=True, source="project")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    lead_detail = UserLiteSerializer(read_only=True, source="lead")
    members_detail = UserLiteSerializer(read_only=True, many=True, source="members")
    link_module = ModuleLinkSerializer(read_only=True, many=True)
@@ -1,8 +1,10 @@
# Module imports
from .base import BaseSerializer
from .user import UserLiteSerializer
from plane.db.models import Notification

class NotificationSerializer(BaseSerializer):
    triggered_by_details = UserLiteSerializer(read_only=True, source="triggered_by")

    class Meta:
        model = Notification
@@ -3,7 +3,7 @@ from rest_framework import serializers

# Module imports
from .base import BaseSerializer
-from .issue import IssueFlatSerializer, LabelSerializer
+from .issue import IssueFlatSerializer, LabelLiteSerializer
from .workspace import WorkspaceLiteSerializer
from .project import ProjectLiteSerializer
from plane.db.models import Page, PageBlock, PageFavorite, PageLabel, Label
@@ -23,16 +23,22 @@ class PageBlockSerializer(BaseSerializer):
            "page",
        ]

+class PageBlockLiteSerializer(BaseSerializer):
+
+    class Meta:
+        model = PageBlock
+        fields = "__all__"


class PageSerializer(BaseSerializer):
    is_favorite = serializers.BooleanField(read_only=True)
-    label_details = LabelSerializer(read_only=True, source="labels", many=True)
-    labels_list = serializers.ListField(
+    label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
+    labels = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
        write_only=True,
        required=False,
    )
-    blocks = PageBlockSerializer(read_only=True, many=True)
+    blocks = PageBlockLiteSerializer(read_only=True, many=True)
    project_detail = ProjectLiteSerializer(source="project", read_only=True)
    workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)

@@ -44,9 +50,13 @@ class PageSerializer(BaseSerializer):
            "project",
            "owned_by",
        ]
+    def to_representation(self, instance):
+        data = super().to_representation(instance)
+        data['labels'] = [str(label.id) for label in instance.labels.all()]
+        return data

    def create(self, validated_data):
-        labels = validated_data.pop("labels_list", None)
+        labels = validated_data.pop("labels", None)
        project_id = self.context["project_id"]
        owned_by_id = self.context["owned_by_id"]
        page = Page.objects.create(
@@ -71,7 +81,7 @@ class PageSerializer(BaseSerializer):
        return page

    def update(self, instance, validated_data):
-        labels = validated_data.pop("labels_list", None)
+        labels = validated_data.pop("labels", None)
        if labels is not None:
            PageLabel.objects.filter(page=instance).delete()
            PageLabel.objects.bulk_create(
@@ -1,57 +0,0 @@
from rest_framework.serializers import (
    ModelSerializer,
    Serializer,
    CharField,
    SerializerMethodField,
)
from rest_framework.authtoken.models import Token
from rest_framework_simplejwt.tokens import RefreshToken


from plane.db.models import User


class UserSerializer(ModelSerializer):
    class Meta:
        model = User
        fields = "__all__"
        extra_kwargs = {"password": {"write_only": True}}


class ChangePasswordSerializer(Serializer):
    model = User

    """
    Serializer for password change endpoint.
    """
    old_password = CharField(required=True)
    new_password = CharField(required=True)


class ResetPasswordSerializer(Serializer):
    model = User

    """
    Serializer for password change endpoint.
    """
    new_password = CharField(required=True)
    confirm_password = CharField(required=True)


class TokenSerializer(ModelSerializer):

    user = UserSerializer()
    access_token = SerializerMethodField()
    refresh_token = SerializerMethodField()

    def get_access_token(self, obj):
        refresh_token = RefreshToken.for_user(obj.user)
        return str(refresh_token.access_token)

    def get_refresh_token(self, obj):
        refresh_token = RefreshToken.for_user(obj.user)
        return str(refresh_token)

    class Meta:
        model = Token
        fields = "__all__"
@@ -1,19 +1,18 @@
# Django imports
from django.db import IntegrityError

# Third party imports
from rest_framework import serializers

# Module imports
-from .base import BaseSerializer
+from .base import BaseSerializer, DynamicBaseSerializer
from plane.api.serializers.workspace import WorkSpaceSerializer, WorkspaceLiteSerializer
-from plane.api.serializers.user import UserLiteSerializer
+from plane.api.serializers.user import UserLiteSerializer, UserAdminLiteSerializer
from plane.db.models import (
    Project,
    ProjectMember,
    ProjectMemberInvite,
    ProjectIdentifier,
    ProjectFavorite,
    ProjectDeployBoard,
    ProjectPublicMember,
)


@@ -80,12 +79,48 @@ class ProjectSerializer(BaseSerializer):
class ProjectLiteSerializer(BaseSerializer):
    class Meta:
        model = Project
-        fields = ["id", "identifier", "name"]
+        fields = [
+            "id",
+            "identifier",
+            "name",
+            "cover_image",
+            "icon_prop",
+            "emoji",
+            "description",
+        ]
        read_only_fields = fields


+class ProjectListSerializer(DynamicBaseSerializer):
+    is_favorite = serializers.BooleanField(read_only=True)
+    total_members = serializers.IntegerField(read_only=True)
+    total_cycles = serializers.IntegerField(read_only=True)
+    total_modules = serializers.IntegerField(read_only=True)
+    is_member = serializers.BooleanField(read_only=True)
+    sort_order = serializers.FloatField(read_only=True)
+    member_role = serializers.IntegerField(read_only=True)
+    is_deployed = serializers.BooleanField(read_only=True)
+    members = serializers.SerializerMethodField()
+
+    def get_members(self, obj):
+        project_members = ProjectMember.objects.filter(
+            project_id=obj.id,
+            is_active=True,
+        ).values(
+            "id",
+            "member_id",
+            "member__display_name",
+            "member__avatar",
+        )
+        return list(project_members)
+
+    class Meta:
+        model = Project
+        fields = "__all__"


class ProjectDetailSerializer(BaseSerializer):
-    workspace = WorkSpaceSerializer(read_only=True)
+    # workspace = WorkSpaceSerializer(read_only=True)
    default_assignee = UserLiteSerializer(read_only=True)
    project_lead = UserLiteSerializer(read_only=True)
    is_favorite = serializers.BooleanField(read_only=True)
@@ -93,6 +128,9 @@ class ProjectDetailSerializer(BaseSerializer):
    total_cycles = serializers.IntegerField(read_only=True)
    total_modules = serializers.IntegerField(read_only=True)
    is_member = serializers.BooleanField(read_only=True)
+    sort_order = serializers.FloatField(read_only=True)
+    member_role = serializers.IntegerField(read_only=True)
+    is_deployed = serializers.BooleanField(read_only=True)

    class Meta:
        model = Project
@@ -100,7 +138,7 @@ class ProjectDetailSerializer(BaseSerializer):


class ProjectMemberSerializer(BaseSerializer):
-    workspace = WorkSpaceSerializer(read_only=True)
+    workspace = WorkspaceLiteSerializer(read_only=True)
    project = ProjectLiteSerializer(read_only=True)
    member = UserLiteSerializer(read_only=True)

@@ -109,9 +147,19 @@ class ProjectMemberSerializer(BaseSerializer):
        fields = "__all__"


+class ProjectMemberAdminSerializer(BaseSerializer):
+    workspace = WorkspaceLiteSerializer(read_only=True)
+    project = ProjectLiteSerializer(read_only=True)
+    member = UserAdminLiteSerializer(read_only=True)
+
+    class Meta:
+        model = ProjectMember
+        fields = "__all__"


class ProjectMemberInviteSerializer(BaseSerializer):
-    project = ProjectSerializer(read_only=True)
-    workspace = WorkSpaceSerializer(read_only=True)
+    project = ProjectLiteSerializer(read_only=True)
+    workspace = WorkspaceLiteSerializer(read_only=True)

    class Meta:
        model = ProjectMemberInvite
@@ -125,8 +173,6 @@ class ProjectIdentifierSerializer(BaseSerializer):


class ProjectFavoriteSerializer(BaseSerializer):
-    project_detail = ProjectSerializer(source="project", read_only=True)

    class Meta:
        model = ProjectFavorite
        fields = "__all__"
@@ -136,13 +182,6 @@ class ProjectFavoriteSerializer(BaseSerializer):
        ]


-class ProjectLiteSerializer(BaseSerializer):
-    class Meta:
-        model = Project
-        fields = ["id", "identifier", "name"]
-        read_only_fields = fields
-
-
class ProjectMemberLiteSerializer(BaseSerializer):
    member = UserLiteSerializer(read_only=True)
    is_subscribed = serializers.BooleanField(read_only=True)
@@ -151,3 +190,28 @@ class ProjectMemberLiteSerializer(BaseSerializer):
        model = ProjectMember
        fields = ["member", "id", "is_subscribed"]
        read_only_fields = fields


+class ProjectDeployBoardSerializer(BaseSerializer):
+    project_details = ProjectLiteSerializer(read_only=True, source="project")
+    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
+
+    class Meta:
+        model = ProjectDeployBoard
+        fields = "__all__"
+        read_only_fields = [
+            "workspace",
+            "project",
+            "anchor",
+        ]
+
+
+class ProjectPublicMemberSerializer(BaseSerializer):
+    class Meta:
+        model = ProjectPublicMember
+        fields = "__all__"
+        read_only_fields = [
+            "workspace",
+            "project",
+            "member",
+        ]
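ProjectListSerializer extends DynamicBaseSerializer, which is imported from .base but not shown in this diff. The usual pattern behind such a base class is to accept a fields kwarg and drop everything else; the sketch below is that pattern under that assumption, not the actual implementation from .base:

# Sketch of a dynamic-fields base serializer; treat the mechanics as an
# assumption, since the real DynamicBaseSerializer is defined elsewhere.
from rest_framework import serializers

class DynamicFieldsSerializer(serializers.ModelSerializer):
    def __init__(self, *args, **kwargs):
        fields = kwargs.pop("fields", None)  # e.g. fields=["id", "name", "members"]
        super().__init__(*args, **kwargs)
        if fields is not None:
            # Keep only the requested fields; everything else is removed.
            for field_name in set(self.fields) - set(fields):
                self.fields.pop(field_name)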
@@ -7,8 +7,6 @@ from plane.db.models import State


class StateSerializer(BaseSerializer):
-    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")

    class Meta:
        model = State
@@ -1,6 +1,9 @@
+# Third party imports
+from rest_framework import serializers
+
# Module import
from .base import BaseSerializer
-from plane.db.models import User
+from plane.db.models import User, Workspace, WorkspaceMemberInvite


class UserSerializer(BaseSerializer):
@@ -30,6 +33,81 @@ class UserSerializer(BaseSerializer):
        return bool(obj.first_name) or bool(obj.last_name)


+class UserMeSerializer(BaseSerializer):
+    class Meta:
+        model = User
+        fields = [
+            "id",
+            "avatar",
+            "cover_image",
+            "date_joined",
+            "display_name",
+            "email",
+            "first_name",
+            "last_name",
+            "is_active",
+            "is_bot",
+            "is_email_verified",
+            "is_managed",
+            "is_onboarded",
+            "is_tour_completed",
+            "mobile_number",
+            "role",
+            "onboarding_step",
+            "user_timezone",
+            "username",
+            "theme",
+            "last_workspace_id",
+        ]
+        read_only_fields = fields
+
+
+class UserMeSettingsSerializer(BaseSerializer):
+    workspace = serializers.SerializerMethodField()
+
+    class Meta:
+        model = User
+        fields = [
+            "id",
+            "email",
+            "workspace",
+        ]
+        read_only_fields = fields
+
+    def get_workspace(self, obj):
+        workspace_invites = WorkspaceMemberInvite.objects.filter(
+            email=obj.email
+        ).count()
+        if obj.last_workspace_id is not None:
+            workspace = Workspace.objects.filter(
+                pk=obj.last_workspace_id, workspace_member__member=obj.id
+            ).first()
+            return {
+                "last_workspace_id": obj.last_workspace_id,
+                "last_workspace_slug": workspace.slug if workspace is not None else "",
+                "fallback_workspace_id": obj.last_workspace_id,
+                "fallback_workspace_slug": workspace.slug if workspace is not None else "",
+                "invites": workspace_invites,
+            }
+        else:
+            fallback_workspace = (
+                Workspace.objects.filter(workspace_member__member_id=obj.id)
+                .order_by("created_at")
+                .first()
+            )
+            return {
+                "last_workspace_id": None,
+                "last_workspace_slug": None,
+                "fallback_workspace_id": fallback_workspace.id
+                if fallback_workspace is not None
+                else None,
+                "fallback_workspace_slug": fallback_workspace.slug
+                if fallback_workspace is not None
+                else None,
+                "invites": workspace_invites,
+            }


class UserLiteSerializer(BaseSerializer):
    class Meta:
        model = User
@@ -37,11 +115,49 @@ class UserLiteSerializer(BaseSerializer):
            "id",
            "first_name",
            "last_name",
            "email",
            "avatar",
            "is_bot",
            "display_name",
        ]
        read_only_fields = [
            "id",
            "is_bot",
        ]


+class UserAdminLiteSerializer(BaseSerializer):
+    class Meta:
+        model = User
+        fields = [
+            "id",
+            "first_name",
+            "last_name",
+            "avatar",
+            "is_bot",
+            "display_name",
+            "email",
+        ]
+        read_only_fields = [
+            "id",
+            "is_bot",
+        ]
+
+
+class ChangePasswordSerializer(serializers.Serializer):
+    model = User
+
+    """
+    Serializer for password change endpoint.
+    """
+    old_password = serializers.CharField(required=True)
+    new_password = serializers.CharField(required=True)
+
+
+class ResetPasswordSerializer(serializers.Serializer):
+    model = User
+
+    """
+    Serializer for password change endpoint.
+    """
+    new_password = serializers.CharField(required=True)
+    confirm_password = serializers.CharField(required=True)
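UserMeSettingsSerializer.get_workspace always returns the same five keys, differing only in how the workspace is resolved. Illustrative output shapes for the two branches (all values are made up):

# User with a last_workspace_id that they are still a member of:
with_last_workspace = {
    "last_workspace_id": "9c1e...",
    "last_workspace_slug": "acme",
    "fallback_workspace_id": "9c1e...",
    "fallback_workspace_slug": "acme",
    "invites": 2,
}

# User without one: fall back to the oldest workspace they belong to.
without_last_workspace = {
    "last_workspace_id": None,
    "last_workspace_slug": None,
    "fallback_workspace_id": "0b7d...",
    "fallback_workspace_slug": "beta-co",
    "invites": 0,
}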
@@ -5,10 +5,39 @@ from rest_framework import serializers
from .base import BaseSerializer
from .workspace import WorkspaceLiteSerializer
from .project import ProjectLiteSerializer
-from plane.db.models import IssueView, IssueViewFavorite
+from plane.db.models import GlobalView, IssueView, IssueViewFavorite
from plane.utils.issue_filters import issue_filters


+class GlobalViewSerializer(BaseSerializer):
+    workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
+
+    class Meta:
+        model = GlobalView
+        fields = "__all__"
+        read_only_fields = [
+            "workspace",
+            "query",
+        ]
+
+    def create(self, validated_data):
+        query_params = validated_data.get("query_data", {})
+        if bool(query_params):
+            validated_data["query"] = issue_filters(query_params, "POST")
+        else:
+            validated_data["query"] = dict()
+        return GlobalView.objects.create(**validated_data)
+
+    def update(self, instance, validated_data):
+        query_params = validated_data.get("query_data", {})
+        if bool(query_params):
+            validated_data["query"] = issue_filters(query_params, "POST")
+        else:
+            validated_data["query"] = dict()
+        validated_data["query"] = issue_filters(query_params, "PATCH")
+        return super().update(instance, validated_data)


class IssueViewSerializer(BaseSerializer):
    is_favorite = serializers.BooleanField(read_only=True)
    project_detail = ProjectLiteSerializer(source="project", read_only=True)
@@ -28,7 +57,7 @@ class IssueViewSerializer(BaseSerializer):
        if bool(query_params):
            validated_data["query"] = issue_filters(query_params, "POST")
        else:
-            validated_data["query"] = dict()
+            validated_data["query"] = {}
        return IssueView.objects.create(**validated_data)

    def update(self, instance, validated_data):
@@ -36,7 +65,7 @@ class IssueViewSerializer(BaseSerializer):
        if bool(query_params):
            validated_data["query"] = issue_filters(query_params, "POST")
        else:
-            validated_data["query"] = dict()
+            validated_data["query"] = {}
        validated_data["query"] = issue_filters(query_params, "PATCH")
        return super().update(instance, validated_data)
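In both view serializers, clients send raw filter params as query_data while query is read-only and derived server-side via plane.utils.issue_filters.issue_filters. A hedged usage sketch (the save kwarg and filter keys are assumptions; issue_filters internals are not shown in this diff):

# Hypothetical create call against GlobalViewSerializer.
serializer = GlobalViewSerializer(
    data={"name": "My filters", "query_data": {"priority": ["urgent"]}}
)
serializer.is_valid(raise_exception=True)
view = serializer.save(workspace_id=workspace_id)  # workspace_id assumed to come from the view layer
# view.query now holds the normalized filter dict produced by issue_filters().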
apiserver/plane/api/serializers/webhook.py (new file, 30 lines)
@@ -0,0 +1,30 @@
# Third party imports
from rest_framework import serializers

# Module imports
from .base import DynamicBaseSerializer
from plane.db.models import Webhook, WebhookLog
from plane.db.models.webhook import validate_domain, validate_schema

class WebhookSerializer(DynamicBaseSerializer):
    url = serializers.URLField(validators=[validate_schema, validate_domain])

    class Meta:
        model = Webhook
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "secret_key",
        ]


class WebhookLogSerializer(DynamicBaseSerializer):

    class Meta:
        model = WebhookLog
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "webhook"
        ]
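The URLField runs validate_schema and validate_domain in order, so a disallowed scheme or domain surfaces as an ordinary field error. A minimal sketch (the exact rules those validators enforce are defined in plane.db.models.webhook and are not shown here):

# Hypothetical validation check; the URL and error text are illustrative.
serializer = WebhookSerializer(data={"url": "http://insecure.example.com/hook"})
if not serializer.is_valid():
    print(serializer.errors["url"])  # whatever message validate_schema/validate_domain raise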
@@ -3,7 +3,7 @@ from rest_framework import serializers

# Module imports
from .base import BaseSerializer
-from .user import UserLiteSerializer
+from .user import UserLiteSerializer, UserAdminLiteSerializer

from plane.db.models import (
    User,
@@ -33,10 +33,37 @@ class WorkSpaceSerializer(BaseSerializer):
            "owner",
        ]

+class WorkspaceLiteSerializer(BaseSerializer):
+    class Meta:
+        model = Workspace
+        fields = [
+            "name",
+            "slug",
+            "id",
+        ]
+        read_only_fields = fields
+
+
class WorkSpaceMemberSerializer(BaseSerializer):
    member = UserLiteSerializer(read_only=True)
-    workspace = WorkSpaceSerializer(read_only=True)
+    workspace = WorkspaceLiteSerializer(read_only=True)

    class Meta:
        model = WorkspaceMember
        fields = "__all__"


+class WorkspaceMemberMeSerializer(BaseSerializer):
+
+    class Meta:
+        model = WorkspaceMember
+        fields = "__all__"
+
+
+class WorkspaceMemberAdminSerializer(BaseSerializer):
+    member = UserAdminLiteSerializer(read_only=True)
+    workspace = WorkspaceLiteSerializer(read_only=True)
+
+    class Meta:
+        model = WorkspaceMember
@@ -83,9 +110,8 @@ class TeamSerializer(BaseSerializer):
            ]
            TeamMember.objects.bulk_create(team_members, batch_size=10)
            return team
-        else:
-            team = Team.objects.create(**validated_data)
-            return team
+        team = Team.objects.create(**validated_data)
+        return team

    def update(self, instance, validated_data):
        if "members" in validated_data:
@@ -97,19 +123,7 @@ class TeamSerializer(BaseSerializer):
            ]
            TeamMember.objects.bulk_create(team_members, batch_size=10)
            return super().update(instance, validated_data)
-        else:
-            return super().update(instance, validated_data)
-
-
-class WorkspaceLiteSerializer(BaseSerializer):
-    class Meta:
-        model = Workspace
-        fields = [
-            "name",
-            "slug",
-            "id",
-        ]
-        read_only_fields = fields
+        return super().update(instance, validated_data)


class WorkspaceThemeSerializer(BaseSerializer):
apiserver/plane/api/urls/__init__.py (new file, 54 lines)
@@ -0,0 +1,54 @@
from .analytic import urlpatterns as analytic_urls
from .asset import urlpatterns as asset_urls
from .authentication import urlpatterns as authentication_urls
from .config import urlpatterns as configuration_urls
from .cycle import urlpatterns as cycle_urls
from .estimate import urlpatterns as estimate_urls
from .external import urlpatterns as external_urls
from .importer import urlpatterns as importer_urls
from .inbox import urlpatterns as inbox_urls
from .integration import urlpatterns as integration_urls
from .issue import urlpatterns as issue_urls
from .module import urlpatterns as module_urls
from .notification import urlpatterns as notification_urls
from .page import urlpatterns as page_urls
from .project import urlpatterns as project_urls
from .public_board import urlpatterns as public_board_urls
from .search import urlpatterns as search_urls
from .state import urlpatterns as state_urls
from .user import urlpatterns as user_urls
from .views import urlpatterns as view_urls
from .workspace import urlpatterns as workspace_urls
from .api import urlpatterns as api_urls
from .webhook import urlpatterns as webhook_urls


# Django imports
from django.conf import settings


urlpatterns = [
    *analytic_urls,
    *asset_urls,
    *authentication_urls,
    *configuration_urls,
    *cycle_urls,
    *estimate_urls,
    *external_urls,
    *importer_urls,
    *inbox_urls,
    *integration_urls,
    *issue_urls,
    *module_urls,
    *notification_urls,
    *page_urls,
    *project_urls,
    *public_board_urls,
    *search_urls,
    *state_urls,
    *user_urls,
    *view_urls,
    *workspace_urls,
    *api_urls,
    *webhook_urls,
]
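With every per-domain urlpatterns list re-exported and splatted into one flat list, a root URLConf only needs a single include. A sketch of that wiring, assuming an "api/" prefix (the prefix is an assumption; this diff does not show the project-level urls.py):

# Hypothetical project-level urls.py.
from django.urls import include, path

urlpatterns = [
    path("api/", include("plane.api.urls")),  # pulls in the flat list above
]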
apiserver/plane/api/urls/analytic.py (new file, 46 lines)
@@ -0,0 +1,46 @@
from django.urls import path


from plane.api.views import (
    AnalyticsEndpoint,
    AnalyticViewViewset,
    SavedAnalyticEndpoint,
    ExportAnalyticsEndpoint,
    DefaultAnalyticsEndpoint,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/analytics/",
        AnalyticsEndpoint.as_view(),
        name="plane-analytics",
    ),
    path(
        "workspaces/<str:slug>/analytic-view/",
        AnalyticViewViewset.as_view({"get": "list", "post": "create"}),
        name="analytic-view",
    ),
    path(
        "workspaces/<str:slug>/analytic-view/<uuid:pk>/",
        AnalyticViewViewset.as_view(
            {"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
        ),
        name="analytic-view",
    ),
    path(
        "workspaces/<str:slug>/saved-analytic-view/<uuid:analytic_id>/",
        SavedAnalyticEndpoint.as_view(),
        name="saved-analytic-view",
    ),
    path(
        "workspaces/<str:slug>/export-analytics/",
        ExportAnalyticsEndpoint.as_view(),
        name="export-analytics",
    ),
    path(
        "workspaces/<str:slug>/default-analytics/",
        DefaultAnalyticsEndpoint.as_view(),
        name="default-analytics",
    ),
]
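Throughout these URL modules, the dictionary passed to as_view() maps HTTP verbs onto ViewSet actions, which is how one route serves both list and create. A minimal self-contained sketch of the same DRF pattern (the PingViewSet is hypothetical, used only to illustrate the mapping):

from rest_framework import viewsets
from rest_framework.response import Response

class PingViewSet(viewsets.ViewSet):
    # Hypothetical viewset: one URL, two verbs.
    def list(self, request):
        return Response(["pong"])

    def create(self, request):
        return Response({"echo": request.data}, status=201)

# GET dispatches to list(), POST to create(), exactly as in the routes above.
ping_view = PingViewSet.as_view({"get": "list", "post": "create"})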
apiserver/plane/api/urls/api.py (new file, 17 lines)
@@ -0,0 +1,17 @@
from django.urls import path
from plane.api.views import ApiTokenEndpoint

urlpatterns = [
    # API Tokens
    path(
        "workspaces/<str:slug>/api-tokens/",
        ApiTokenEndpoint.as_view(),
        name="api-tokens",
    ),
    path(
        "workspaces/<str:slug>/api-tokens/<uuid:pk>/",
        ApiTokenEndpoint.as_view(),
        name="api-tokens",
    ),
    ## End API Tokens
]
apiserver/plane/api/urls/asset.py (new file, 31 lines)
@@ -0,0 +1,31 @@
from django.urls import path


from plane.api.views import (
    FileAssetEndpoint,
    UserAssetsEndpoint,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/file-assets/",
        FileAssetEndpoint.as_view(),
        name="file-assets",
    ),
    path(
        "workspaces/file-assets/<uuid:workspace_id>/<str:asset_key>/",
        FileAssetEndpoint.as_view(),
        name="file-assets",
    ),
    path(
        "users/file-assets/",
        UserAssetsEndpoint.as_view(),
        name="user-file-assets",
    ),
    path(
        "users/file-assets/<str:asset_key>/",
        UserAssetsEndpoint.as_view(),
        name="user-file-assets",
    ),
]
apiserver/plane/api/urls/authentication.py (new file, 68 lines)
@@ -0,0 +1,68 @@
from django.urls import path

from rest_framework_simplejwt.views import TokenRefreshView


from plane.api.views import (
    # Authentication
    SignUpEndpoint,
    SignInEndpoint,
    SignOutEndpoint,
    MagicSignInEndpoint,
    MagicSignInGenerateEndpoint,
    OauthEndpoint,
    ## End Authentication
    # Auth Extended
    ForgotPasswordEndpoint,
    VerifyEmailEndpoint,
    ResetPasswordEndpoint,
    RequestEmailVerificationEndpoint,
    ChangePasswordEndpoint,
    ## End Auth Extender
    # API Tokens
    ApiTokenEndpoint,
    ## End API Tokens
)


urlpatterns = [
    # Social Auth
    path("social-auth/", OauthEndpoint.as_view(), name="oauth"),
    # Auth
    path("sign-up/", SignUpEndpoint.as_view(), name="sign-up"),
    path("sign-in/", SignInEndpoint.as_view(), name="sign-in"),
    path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"),
    # Magic Sign In/Up
    path(
        "magic-generate/", MagicSignInGenerateEndpoint.as_view(), name="magic-generate"
    ),
    path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"),
    path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"),
    # Email verification
    path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"),
    path(
        "request-email-verify/",
        RequestEmailVerificationEndpoint.as_view(),
        name="request-reset-email",
    ),
    # Password Manipulation
    path(
        "users/me/change-password/",
        ChangePasswordEndpoint.as_view(),
        name="change-password",
    ),
    path(
        "reset-password/<uidb64>/<token>/",
        ResetPasswordEndpoint.as_view(),
        name="password-reset",
    ),
    path(
        "forgot-password/",
        ForgotPasswordEndpoint.as_view(),
        name="forgot-password",
    ),
    # API Tokens
    path("api-tokens/", ApiTokenEndpoint.as_view(), name="api-tokens"),
    path("api-tokens/<uuid:pk>/", ApiTokenEndpoint.as_view(), name="api-tokens"),
    ## End API Tokens
]
apiserver/plane/api/urls/config.py (new file, 12 lines)
@@ -0,0 +1,12 @@
from django.urls import path


from plane.api.views import ConfigurationEndpoint

urlpatterns = [
    path(
        "configs/",
        ConfigurationEndpoint.as_view(),
        name="configuration",
    ),
]
apiserver/plane/api/urls/cycle.py (new file, 87 lines)
@@ -0,0 +1,87 @@
from django.urls import path


from plane.api.views import (
    CycleViewSet,
    CycleIssueViewSet,
    CycleDateCheckEndpoint,
    CycleFavoriteViewSet,
    TransferCycleIssueEndpoint,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/",
        CycleViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:pk>/",
        CycleViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/",
        CycleIssueViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/<uuid:pk>/",
        CycleIssueViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/date-check/",
        CycleDateCheckEndpoint.as_view(),
        name="project-cycle-date",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-cycles/",
        CycleFavoriteViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="user-favorite-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-cycles/<uuid:cycle_id>/",
        CycleFavoriteViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="user-favorite-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/transfer-issues/",
        TransferCycleIssueEndpoint.as_view(),
        name="transfer-issues",
    ),
]
apiserver/plane/api/urls/estimate.py (new file, 37 lines)
@@ -0,0 +1,37 @@
from django.urls import path


from plane.api.views import (
    ProjectEstimatePointEndpoint,
    BulkEstimatePointEndpoint,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/project-estimates/",
        ProjectEstimatePointEndpoint.as_view(),
        name="project-estimate-points",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/estimates/",
        BulkEstimatePointEndpoint.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="bulk-create-estimate-points",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/estimates/<uuid:estimate_id>/",
        BulkEstimatePointEndpoint.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="bulk-create-estimate-points",
    ),
]
apiserver/plane/api/urls/external.py (new file, 25 lines)
@@ -0,0 +1,25 @@
from django.urls import path


from plane.api.views import UnsplashEndpoint
from plane.api.views import ReleaseNotesEndpoint
from plane.api.views import GPTIntegrationEndpoint


urlpatterns = [
    path(
        "unsplash/",
        UnsplashEndpoint.as_view(),
        name="unsplash",
    ),
    path(
        "release-notes/",
        ReleaseNotesEndpoint.as_view(),
        name="release-notes",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/ai-assistant/",
        GPTIntegrationEndpoint.as_view(),
        name="importer",
    ),
]
apiserver/plane/api/urls/importer.py (new file, 37 lines)
@@ -0,0 +1,37 @@
from django.urls import path


from plane.api.views import (
    ServiceIssueImportSummaryEndpoint,
    ImportServiceEndpoint,
    UpdateServiceImportStatusEndpoint,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/importers/<str:service>/",
        ServiceIssueImportSummaryEndpoint.as_view(),
        name="importer-summary",
    ),
    path(
        "workspaces/<str:slug>/projects/importers/<str:service>/",
        ImportServiceEndpoint.as_view(),
        name="importer",
    ),
    path(
        "workspaces/<str:slug>/importers/",
        ImportServiceEndpoint.as_view(),
        name="importer",
    ),
    path(
        "workspaces/<str:slug>/importers/<str:service>/<uuid:pk>/",
        ImportServiceEndpoint.as_view(),
        name="importer",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/service/<str:service>/importers/<uuid:importer_id>/",
        UpdateServiceImportStatusEndpoint.as_view(),
        name="importer-status",
    ),
]
apiserver/plane/api/urls/inbox.py (new file, 53 lines)
@@ -0,0 +1,53 @@
from django.urls import path


from plane.api.views import (
    InboxViewSet,
    InboxIssueViewSet,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/",
        InboxViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="inbox",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:pk>/",
        InboxViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="inbox",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
        InboxIssueViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="inbox-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
        InboxIssueViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="inbox-issue",
    ),
]
apiserver/plane/api/urls/integration.py (new file, 150 lines)
@@ -0,0 +1,150 @@
from django.urls import path


from plane.api.views import (
    IntegrationViewSet,
    WorkspaceIntegrationViewSet,
    GithubRepositoriesEndpoint,
    GithubRepositorySyncViewSet,
    GithubIssueSyncViewSet,
    GithubCommentSyncViewSet,
    BulkCreateGithubIssueSyncEndpoint,
    SlackProjectSyncViewSet,
)


urlpatterns = [
    path(
        "integrations/",
        IntegrationViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="integrations",
    ),
    path(
        "integrations/<uuid:pk>/",
        IntegrationViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="integrations",
    ),
    path(
        "workspaces/<str:slug>/workspace-integrations/",
        WorkspaceIntegrationViewSet.as_view(
            {
                "get": "list",
            }
        ),
        name="workspace-integrations",
    ),
    path(
        "workspaces/<str:slug>/workspace-integrations/<str:provider>/",
        WorkspaceIntegrationViewSet.as_view(
            {
                "post": "create",
            }
        ),
        name="workspace-integrations",
    ),
    path(
        "workspaces/<str:slug>/workspace-integrations/<uuid:pk>/provider/",
        WorkspaceIntegrationViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
        name="workspace-integrations",
    ),
    # Github Integrations
    path(
        "workspaces/<str:slug>/workspace-integrations/<uuid:workspace_integration_id>/github-repositories/",
        GithubRepositoriesEndpoint.as_view(),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/",
        GithubRepositorySyncViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/<uuid:pk>/",
        GithubRepositorySyncViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/",
        GithubIssueSyncViewSet.as_view(
            {
                "post": "create",
                "get": "list",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/bulk-create-github-issue-sync/",
        BulkCreateGithubIssueSyncEndpoint.as_view(),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:pk>/",
        GithubIssueSyncViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/",
        GithubCommentSyncViewSet.as_view(
            {
                "post": "create",
                "get": "list",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/<uuid:pk>/",
        GithubCommentSyncViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
    ),
    ## End Github Integrations
    # Slack Integration
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/",
        SlackProjectSyncViewSet.as_view(
            {
                "post": "create",
                "get": "list",
            }
        ),
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/<uuid:pk>/",
        SlackProjectSyncViewSet.as_view(
            {
                "delete": "destroy",
                "get": "retrieve",
            }
        ),
    ),
    ## End Slack Integration
]
apiserver/plane/api/urls/issue.py (new file, 327 lines)
@@ -0,0 +1,327 @@
from django.urls import path


from plane.api.views import (
    IssueViewSet,
    IssueListEndpoint,
    IssueListGroupedEndpoint,
    LabelViewSet,
    BulkCreateIssueLabelsEndpoint,
    BulkDeleteIssuesEndpoint,
    BulkImportIssuesEndpoint,
    UserWorkSpaceIssues,
    SubIssuesEndpoint,
    IssueLinkViewSet,
    IssueAttachmentEndpoint,
    ExportIssuesEndpoint,
    IssueActivityEndpoint,
    IssueCommentViewSet,
    IssueSubscriberViewSet,
    IssueReactionViewSet,
    CommentReactionViewSet,
    IssueUserDisplayPropertyEndpoint,
    IssueArchiveViewSet,
    IssueRelationViewSet,
    IssueDraftViewSet,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
        IssueViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue",
    ),
    path(
        "v2/workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
        IssueListEndpoint.as_view(),
        name="project-issue",
    ),
    path(
        "v3/workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
        IssueListGroupedEndpoint.as_view(),
        name="project-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:pk>/",
        IssueViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/",
        LabelViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-labels",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/<uuid:pk>/",
        LabelViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-labels",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-create-labels/",
        BulkCreateIssueLabelsEndpoint.as_view(),
        name="project-bulk-labels",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-delete-issues/",
        BulkDeleteIssuesEndpoint.as_view(),
        name="project-issues-bulk",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-issues/<str:service>/",
        BulkImportIssuesEndpoint.as_view(),
        name="project-issues-bulk",
    ),
    path(
        "workspaces/<str:slug>/my-issues/",
        UserWorkSpaceIssues.as_view(),
        name="workspace-issues",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/sub-issues/",
        SubIssuesEndpoint.as_view(),
        name="sub-issues",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-links/",
        IssueLinkViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-links",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-links/<uuid:pk>/",
        IssueLinkViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-links",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/",
        IssueAttachmentEndpoint.as_view(),
        name="project-issue-attachments",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/<uuid:pk>/",
        IssueAttachmentEndpoint.as_view(),
        name="project-issue-attachments",
    ),
    path(
        "workspaces/<str:slug>/export-issues/",
        ExportIssuesEndpoint.as_view(),
        name="export-issues",
    ),
    ## End Issues
    ## Issue Activity
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/history/",
        IssueActivityEndpoint.as_view(),
        name="project-issue-history",
    ),
    ## Issue Activity
    ## IssueComments
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
        IssueCommentViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-comment",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
        IssueCommentViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-comment",
    ),
    ## End IssueComments
    # Issue Subscribers
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-subscribers/",
        IssueSubscriberViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-subscribers",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-subscribers/<uuid:subscriber_id>/",
        IssueSubscriberViewSet.as_view({"delete": "destroy"}),
        name="project-issue-subscribers",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/subscribe/",
        IssueSubscriberViewSet.as_view(
            {
                "get": "subscription_status",
                "post": "subscribe",
                "delete": "unsubscribe",
            }
        ),
        name="project-issue-subscribers",
    ),
    ## End Issue Subscribers
    # Issue Reactions
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
        IssueReactionViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-reactions",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
        IssueReactionViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="project-issue-reactions",
    ),
    ## End Issue Reactions
    # Comment Reactions
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
        CommentReactionViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-comment-reactions",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
        CommentReactionViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="project-issue-comment-reactions",
    ),
    ## End Comment Reactions
    ## IssueProperty
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-display-properties/",
        IssueUserDisplayPropertyEndpoint.as_view(),
        name="project-issue-display-properties",
    ),
    ## IssueProperty End
    ## Issue Archives
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/",
        IssueArchiveViewSet.as_view(
            {
                "get": "list",
            }
        ),
        name="project-issue-archive",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/<uuid:pk>/",
        IssueArchiveViewSet.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
        name="project-issue-archive",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/unarchive/<uuid:pk>/",
        IssueArchiveViewSet.as_view(
            {
                "post": "unarchive",
            }
        ),
        name="project-issue-archive",
    ),
    ## End Issue Archives
    ## Issue Relation
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/",
        IssueRelationViewSet.as_view(
            {
                "post": "create",
            }
        ),
        name="issue-relation",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/<uuid:pk>/",
        IssueRelationViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="issue-relation",
    ),
    ## End Issue Relation
    ## Issue Drafts
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/",
        IssueDraftViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-draft",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/<uuid:pk>/",
        IssueDraftViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-draft",
    ),
]
apiserver/plane/api/urls/module.py (new file, 104 lines)
@@ -0,0 +1,104 @@
from django.urls import path


from plane.api.views import (
    ModuleViewSet,
    ModuleIssueViewSet,
    ModuleLinkViewSet,
    ModuleFavoriteViewSet,
    BulkImportModulesEndpoint,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/",
        ModuleViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-modules",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:pk>/",
        ModuleViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-modules",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/",
        ModuleIssueViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-module-issues",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/<uuid:pk>/",
        ModuleIssueViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-module-issues",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-links/",
        ModuleLinkViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-module-links",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-links/<uuid:pk>/",
        ModuleLinkViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-module-links",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-modules/",
        ModuleFavoriteViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="user-favorite-module",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-modules/<uuid:module_id>/",
        ModuleFavoriteViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="user-favorite-module",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-modules/<str:service>/",
        BulkImportModulesEndpoint.as_view(),
        name="bulk-modules-create",
    ),
]
apiserver/plane/api/urls/notification.py (new file, 66 lines)
@@ -0,0 +1,66 @@
from django.urls import path


from plane.api.views import (
    NotificationViewSet,
    UnreadNotificationEndpoint,
    MarkAllReadNotificationViewSet,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/users/notifications/",
        NotificationViewSet.as_view(
            {
                "get": "list",
            }
        ),
        name="notifications",
    ),
    path(
        "workspaces/<str:slug>/users/notifications/<uuid:pk>/",
        NotificationViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="notifications",
    ),
    path(
        "workspaces/<str:slug>/users/notifications/<uuid:pk>/read/",
        NotificationViewSet.as_view(
            {
                "post": "mark_read",
                "delete": "mark_unread",
            }
        ),
        name="notifications",
    ),
    path(
        "workspaces/<str:slug>/users/notifications/<uuid:pk>/archive/",
        NotificationViewSet.as_view(
            {
                "post": "archive",
                "delete": "unarchive",
            }
        ),
        name="notifications",
    ),
    path(
        "workspaces/<str:slug>/users/notifications/unread/",
        UnreadNotificationEndpoint.as_view(),
        name="unread-notifications",
    ),
    path(
        "workspaces/<str:slug>/users/notifications/mark-all-read/",
        MarkAllReadNotificationViewSet.as_view(
            {
                "post": "create",
            }
        ),
        name="mark-all-read-notifications",
    ),
]
apiserver/plane/api/urls/page.py (new file, 79 lines)
@@ -0,0 +1,79 @@
from django.urls import path


from plane.api.views import (
    PageViewSet,
    PageBlockViewSet,
    PageFavoriteViewSet,
    CreateIssueFromPageBlockEndpoint,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/",
        PageViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-pages",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/",
        PageViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-pages",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/page-blocks/",
        PageBlockViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-page-blocks",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/page-blocks/<uuid:pk>/",
        PageBlockViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-page-blocks",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-pages/",
        PageFavoriteViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="user-favorite-pages",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-pages/<uuid:page_id>/",
        PageFavoriteViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="user-favorite-pages",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/page-blocks/<uuid:page_block_id>/issues/",
        CreateIssueFromPageBlockEndpoint.as_view(),
        name="page-block-issues",
    ),
]
150
apiserver/plane/api/urls/project.py
Normal file
150
apiserver/plane/api/urls/project.py
Normal file
@@ -0,0 +1,150 @@
|
||||
from django.urls import path

from plane.api.views import (
    ProjectViewSet,
    ProjectInvitationsViewset,
    ProjectMemberViewSet,
    ProjectMemberUserEndpoint,
    ProjectJoinEndpoint,
    AddTeamToProjectEndpoint,
    ProjectUserViewsEndpoint,
    ProjectIdentifierEndpoint,
    ProjectFavoritesViewSet,
    ProjectPublicCoverImagesEndpoint,
    UserProjectInvitationsViewset,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/",
        ProjectViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:pk>/",
        ProjectViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project",
    ),
    path(
        "workspaces/<str:slug>/project-identifiers/",
        ProjectIdentifierEndpoint.as_view(),
        name="project-identifiers",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/invitations/",
        ProjectInvitationsViewset.as_view(
            {
                "get": "list",
                "post": "create",
            },
        ),
        name="project-member-invite",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/invitations/<uuid:pk>/",
        ProjectInvitationsViewset.as_view(
            {
                "get": "retrieve",
                "delete": "destroy",
            }
        ),
        name="project-member-invite",
    ),
    path(
        "users/me/invitations/projects/",
        UserProjectInvitationsViewset.as_view(
            {
                "get": "list",
                "post": "create",
            },
        ),
        name="user-project-invitations",
    ),
    path(
        "workspaces/<str:slug>/projects/join/",
        ProjectJoinEndpoint.as_view(),
        name="project-join",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/members/",
        ProjectMemberViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-member",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/members/<uuid:pk>/",
        ProjectMemberViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-member",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/members/leave/",
        ProjectMemberViewSet.as_view(
            {
                "post": "leave",
            }
        ),
        name="project-member",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/team-invite/",
        AddTeamToProjectEndpoint.as_view(),
        name="projects",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/project-views/",
        ProjectUserViewsEndpoint.as_view(),
        name="project-view",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/project-members/me/",
        ProjectMemberUserEndpoint.as_view(),
        name="project-member-view",
    ),
    path(
        "workspaces/<str:slug>/user-favorite-projects/",
        ProjectFavoritesViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-favorite",
    ),
    path(
        "workspaces/<str:slug>/user-favorite-projects/<uuid:project_id>/",
        ProjectFavoritesViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="project-favorite",
    ),
    path(
        "project-covers/",
        ProjectPublicCoverImagesEndpoint.as_view(),
        name="project-covers",
    ),
]
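Because each module ships its own urlpatterns, the routes can be exercised in isolation from tests. A quick sketch using DRF's test client (the workspace slug and the assumption that these patterns are mounted at the site root are both illustrative):

from rest_framework.test import APIClient

client = APIClient()
# An unauthenticated GET; the real endpoints sit behind permission classes,
# so expect a 401/403 here rather than a 200 without credentials.
response = client.get("/workspaces/acme/projects/")
print(response.status_code)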
151  apiserver/plane/api/urls/public_board.py  Normal file
@@ -0,0 +1,151 @@
from django.urls import path


from plane.api.views import (
    ProjectDeployBoardViewSet,
    ProjectDeployBoardPublicSettingsEndpoint,
    ProjectIssuesPublicEndpoint,
    IssueRetrievePublicEndpoint,
    IssueCommentPublicViewSet,
    IssueReactionPublicViewSet,
    CommentReactionPublicViewSet,
    InboxIssuePublicViewSet,
    IssueVotePublicViewSet,
    WorkspaceProjectDeployBoardEndpoint,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/",
        ProjectDeployBoardViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-deploy-board",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/<uuid:pk>/",
        ProjectDeployBoardViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-deploy-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/settings/",
        ProjectDeployBoardPublicSettingsEndpoint.as_view(),
        name="project-deploy-board-settings",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/",
        ProjectIssuesPublicEndpoint.as_view(),
        name="project-deploy-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/",
        IssueRetrievePublicEndpoint.as_view(),
        name="workspace-project-boards",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
        IssueCommentPublicViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="issue-comments-project-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
        IssueCommentPublicViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="issue-comments-project-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
        IssueReactionPublicViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="issue-reactions-project-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
        IssueReactionPublicViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="issue-reactions-project-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
        CommentReactionPublicViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="comment-reactions-project-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
        CommentReactionPublicViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="comment-reactions-project-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
        InboxIssuePublicViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="inbox-issue",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
        InboxIssuePublicViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="inbox-issue",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/votes/",
        IssueVotePublicViewSet.as_view(
            {
                "get": "list",
                "post": "create",
                "delete": "destroy",
            }
        ),
        name="issue-vote-project-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/",
        WorkspaceProjectDeployBoardEndpoint.as_view(),
        name="workspace-project-boards",
    ),
]
21  apiserver/plane/api/urls/search.py  Normal file
@@ -0,0 +1,21 @@
from django.urls import path


from plane.api.views import (
    GlobalSearchEndpoint,
    IssueSearchEndpoint,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/search/",
        GlobalSearchEndpoint.as_view(),
        name="global-search",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/search-issues/",
        IssueSearchEndpoint.as_view(),
        name="project-issue-search",
    ),
]
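The name= arguments make these routes reversible, which is how the rest of the codebase can build URLs without hardcoding paths. For example (values are made up; this runs inside a configured Django project, and any prefix added by an enclosing include() is applied automatically):

import uuid

from django.urls import reverse

# Resolves to ".../workspaces/acme/search/"
url = reverse("global-search", kwargs={"slug": "acme"})

# The uuid converter means project_id must be a real UUID to reverse.
issue_search = reverse(
    "project-issue-search",
    kwargs={"slug": "acme", "project_id": uuid.uuid4()},
)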
38  apiserver/plane/api/urls/state.py  Normal file
@@ -0,0 +1,38 @@
from django.urls import path


from plane.api.views import StateViewSet


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/states/",
        StateViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-states",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/states/<uuid:pk>/",
        StateViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-state",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/states/<uuid:pk>/mark-default/",
        StateViewSet.as_view(
            {
                "post": "mark_as_default",
            }
        ),
        name="project-state",
    ),
]
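The mark-default route shows that an explicit as_view() map is not limited to the standard CRUD verbs: any method on the viewset can be bound to a URL. A minimal sketch of such a custom action (the body is an assumption; the real StateViewSet lives in plane.api.views):

from rest_framework import viewsets
from rest_framework.response import Response


class DemoStateViewSet(viewsets.ViewSet):
    def mark_as_default(self, request, slug, project_id, pk):
        # A real implementation would clear the previous default and set this
        # state's default flag inside a transaction; this sketch just echoes.
        return Response({"id": pk, "default": True})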
84  apiserver/plane/api/urls/user.py  Normal file
@@ -0,0 +1,84 @@
from django.urls import path

from plane.api.views import (
    ## User
    UserEndpoint,
    UpdateUserOnBoardedEndpoint,
    UpdateUserTourCompletedEndpoint,
    UserActivityEndpoint,
    ChangePasswordEndpoint,
    ## End User
    ## Workspaces
    UserWorkSpacesEndpoint,
    UserActivityGraphEndpoint,
    UserIssueCompletedGraphEndpoint,
    UserWorkspaceDashboardEndpoint,
    ## End Workspaces
)

urlpatterns = [
    # User Profile
    path(
        "users/me/",
        UserEndpoint.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "deactivate",
            }
        ),
        name="users",
    ),
    path(
        "users/me/settings/",
        UserEndpoint.as_view(
            {
                "get": "retrieve_user_settings",
            }
        ),
        name="users",
    ),
    path(
        "users/me/change-password/",
        ChangePasswordEndpoint.as_view(),
        name="change-password",
    ),
    path(
        "users/me/onboard/",
        UpdateUserOnBoardedEndpoint.as_view(),
        name="user-onboard",
    ),
    path(
        "users/me/tour-completed/",
        UpdateUserTourCompletedEndpoint.as_view(),
        name="user-tour",
    ),
    path(
        "users/workspaces/<str:slug>/activities/",
        UserActivityEndpoint.as_view(),
        name="user-activities",
    ),
    # user workspaces
    path(
        "users/me/workspaces/",
        UserWorkSpacesEndpoint.as_view(),
        name="user-workspace",
    ),
    # User Graphs
    path(
        "users/me/workspaces/<str:slug>/activity-graph/",
        UserActivityGraphEndpoint.as_view(),
        name="user-activity-graph",
    ),
    path(
        "users/me/workspaces/<str:slug>/issues-completed-graph/",
        UserIssueCompletedGraphEndpoint.as_view(),
        name="completed-graph",
    ),
    path(
        "users/me/workspaces/<str:slug>/dashboard/",
        UserWorkspaceDashboardEndpoint.as_view(),
        name="user-workspace-dashboard",
    ),
    ## End User Graph
]
85  apiserver/plane/api/urls/views.py  Normal file
@@ -0,0 +1,85 @@
from django.urls import path


from plane.api.views import (
    IssueViewViewSet,
    GlobalViewViewSet,
    GlobalViewIssuesViewSet,
    IssueViewFavoriteViewSet,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/views/",
        IssueViewViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-view",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/views/<uuid:pk>/",
        IssueViewViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-view",
    ),
    path(
        "workspaces/<str:slug>/views/",
        GlobalViewViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="global-view",
    ),
    path(
        "workspaces/<str:slug>/views/<uuid:pk>/",
        GlobalViewViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="global-view",
    ),
    path(
        "workspaces/<str:slug>/issues/",
        GlobalViewIssuesViewSet.as_view(
            {
                "get": "list",
            }
        ),
        name="global-view-issues",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-views/",
        IssueViewFavoriteViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="user-favorite-view",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-views/<uuid:view_id>/",
        IssueViewFavoriteViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="user-favorite-view",
    ),
]
31  apiserver/plane/api/urls/webhook.py  Normal file
@@ -0,0 +1,31 @@
from django.urls import path

from plane.api.views import (
    WebhookEndpoint,
    WebhookLogsEndpoint,
    WebhookSecretRegenerateEndpoint,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/webhooks/",
        WebhookEndpoint.as_view(),
        name="webhooks",
    ),
    path(
        "workspaces/<str:slug>/webhooks/<uuid:pk>/",
        WebhookEndpoint.as_view(),
        name="webhooks",
    ),
    path(
        "workspaces/<str:slug>/webhooks/<uuid:pk>/regenerate/",
        WebhookSecretRegenerateEndpoint.as_view(),
        name="webhooks",
    ),
    path(
        "workspaces/<str:slug>/webhook-logs/<uuid:webhook_id>/",
        WebhookLogsEndpoint.as_view(),
        name="webhooks",
    ),
]
197  apiserver/plane/api/urls/workspace.py  Normal file
@@ -0,0 +1,197 @@
from django.urls import path


from plane.api.views import (
    UserWorkspaceInvitationsViewSet,
    WorkSpaceViewSet,
    WorkspaceJoinEndpoint,
    WorkSpaceMemberViewSet,
    WorkspaceInvitationsViewset,
    WorkspaceMemberUserEndpoint,
    WorkspaceMemberUserViewsEndpoint,
    WorkSpaceAvailabilityCheckEndpoint,
    TeamMemberViewSet,
    UserLastProjectWithWorkspaceEndpoint,
    WorkspaceThemeViewSet,
    WorkspaceUserProfileStatsEndpoint,
    WorkspaceUserActivityEndpoint,
    WorkspaceUserProfileEndpoint,
    WorkspaceUserProfileIssuesEndpoint,
    WorkspaceLabelsEndpoint,
)


urlpatterns = [
    path(
        "workspace-slug-check/",
        WorkSpaceAvailabilityCheckEndpoint.as_view(),
        name="workspace-availability",
    ),
    path(
        "workspaces/",
        WorkSpaceViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="workspace",
    ),
    path(
        "workspaces/<str:slug>/",
        WorkSpaceViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="workspace",
    ),
    path(
        "workspaces/<str:slug>/invitations/",
        WorkspaceInvitationsViewset.as_view(
            {
                "get": "list",
                "post": "create",
            },
        ),
        name="workspace-invitations",
    ),
    path(
        "workspaces/<str:slug>/invitations/<uuid:pk>/",
        WorkspaceInvitationsViewset.as_view(
            {
                "delete": "destroy",
                "get": "retrieve",
            }
        ),
        name="workspace-invitations",
    ),
    # user workspace invitations
    path(
        "users/me/workspaces/invitations/",
        UserWorkspaceInvitationsViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            },
        ),
        name="user-workspace-invitations",
    ),
    path(
        "workspaces/<str:slug>/invitations/<uuid:pk>/join/",
        WorkspaceJoinEndpoint.as_view(),
        name="workspace-join",
    ),
    # user join workspace
    path(
        "workspaces/<str:slug>/members/",
        WorkSpaceMemberViewSet.as_view({"get": "list"}),
        name="workspace-member",
    ),
    path(
        "workspaces/<str:slug>/members/<uuid:pk>/",
        WorkSpaceMemberViewSet.as_view(
            {
                "patch": "partial_update",
                "delete": "destroy",
                "get": "retrieve",
            }
        ),
        name="workspace-member",
    ),
    path(
        "workspaces/<str:slug>/members/leave/",
        WorkSpaceMemberViewSet.as_view(
            {
                "post": "leave",
            },
        ),
        name="leave-workspace-members",
    ),
    path(
        "workspaces/<str:slug>/teams/",
        TeamMemberViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="workspace-team-members",
    ),
    path(
        "workspaces/<str:slug>/teams/<uuid:pk>/",
        TeamMemberViewSet.as_view(
            {
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
                "get": "retrieve",
            }
        ),
        name="workspace-team-members",
    ),
    path(
        "users/last-visited-workspace/",
        UserLastProjectWithWorkspaceEndpoint.as_view(),
        name="workspace-project-details",
    ),
    path(
        "workspaces/<str:slug>/workspace-members/me/",
        WorkspaceMemberUserEndpoint.as_view(),
        name="workspace-member-details",
    ),
    path(
        "workspaces/<str:slug>/workspace-views/",
        WorkspaceMemberUserViewsEndpoint.as_view(),
        name="workspace-member-views-details",
    ),
    path(
        "workspaces/<str:slug>/workspace-themes/",
        WorkspaceThemeViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="workspace-themes",
    ),
    path(
        "workspaces/<str:slug>/workspace-themes/<uuid:pk>/",
        WorkspaceThemeViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="workspace-themes",
    ),
    path(
        "workspaces/<str:slug>/user-stats/<uuid:user_id>/",
        WorkspaceUserProfileStatsEndpoint.as_view(),
        name="workspace-user-stats",
    ),
    path(
        "workspaces/<str:slug>/user-activity/<uuid:user_id>/",
        WorkspaceUserActivityEndpoint.as_view(),
        name="workspace-user-activity",
    ),
    path(
        "workspaces/<str:slug>/user-profile/<uuid:user_id>/",
        WorkspaceUserProfileEndpoint.as_view(),
        name="workspace-user-profile-page",
    ),
    path(
        "workspaces/<str:slug>/user-issues/<uuid:user_id>/",
        WorkspaceUserProfileIssuesEndpoint.as_view(),
        name="workspace-user-profile-issues",
    ),
    path(
        "workspaces/<str:slug>/labels/",
        WorkspaceLabelsEndpoint.as_view(),
        name="workspace-labels",
    ),
]
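None of these per-domain modules is wired up by itself; they have to be stitched into the project's root urlconf. The aggregator itself is not shown in this diff, but the conventional Django shape would look something like the following (the "api/" prefix and module list are assumptions for illustration):

from django.urls import include, path

urlpatterns = [
    path("api/", include("plane.api.urls.project")),
    path("api/", include("plane.api.urls.workspace")),
    path("api/", include("plane.api.urls.search")),
    path("api/", include("plane.api.urls.state")),
    # ...one include per URL module added in this changeset
]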
@@ -1,5 +1,6 @@
from django.urls import path

from rest_framework_simplejwt.views import TokenRefreshView

# Create your urls here.

@@ -22,15 +23,16 @@ from plane.api.views import (
    # User
    UserEndpoint,
    UpdateUserOnBoardedEndpoint,
    UpdateUserTourCompletedEndpoint,
    UserActivityEndpoint,
    ## End User
    # Workspaces
    WorkSpaceViewSet,
    UserWorkspaceInvitationsEndpoint,
    UserWorkSpacesEndpoint,
    InviteWorkspaceEndpoint,
    JoinWorkspaceEndpoint,
    WorkSpaceMemberViewSet,
    WorkspaceMembersEndpoint,
    WorkspaceInvitationsViewset,
    UserWorkspaceInvitationsEndpoint,
    WorkspaceMemberUserEndpoint,
@@ -44,6 +46,12 @@ from plane.api.views import (
    UserIssueCompletedGraphEndpoint,
    UserWorkspaceDashboardEndpoint,
    WorkspaceThemeViewSet,
    WorkspaceUserProfileStatsEndpoint,
    WorkspaceUserActivityEndpoint,
    WorkspaceUserProfileEndpoint,
    WorkspaceUserProfileIssuesEndpoint,
    WorkspaceLabelsEndpoint,
    LeaveWorkspaceEndpoint,
    ## End Workspaces
    # File Assets
    FileAssetEndpoint,
@@ -53,6 +61,7 @@ from plane.api.views import (
    ProjectViewSet,
    InviteProjectEndpoint,
    ProjectMemberViewSet,
    ProjectMemberEndpoint,
    ProjectMemberInvitationsViewset,
    ProjectMemberUserEndpoint,
    AddMemberToProjectEndpoint,
@@ -60,6 +69,8 @@ from plane.api.views import (
    UserProjectInvitationsViewset,
    ProjectIdentifierEndpoint,
    ProjectFavoritesViewSet,
    LeaveProjectEndpoint,
    ProjectPublicCoverImagesEndpoint,
    ## End Projects
    # Issues
    IssueViewSet,
@@ -70,7 +81,7 @@ from plane.api.views import (
    BulkDeleteIssuesEndpoint,
    BulkImportIssuesEndpoint,
    ProjectUserViewsEndpoint,
    IssuePropertyViewSet,
    IssueUserDisplayPropertyEndpoint,
    LabelViewSet,
    SubIssuesEndpoint,
    IssueLinkViewSet,
@@ -78,6 +89,11 @@ from plane.api.views import (
    IssueAttachmentEndpoint,
    IssueArchiveViewSet,
    IssueSubscriberViewSet,
    IssueCommentPublicViewSet,
    IssueReactionViewSet,
    IssueRelationViewSet,
    CommentReactionViewSet,
    IssueDraftViewSet,
    ## End Issues
    # States
    StateViewSet,
@@ -87,8 +103,9 @@ from plane.api.views import (
    BulkEstimatePointEndpoint,
    ## End Estimates
    # Views
    GlobalViewViewSet,
    GlobalViewIssuesViewSet,
    IssueViewViewSet,
    ViewIssuesEndpoint,
    IssueViewFavoriteViewSet,
    ## End Views
    # Cycles
@@ -133,12 +150,11 @@ from plane.api.views import (
    GlobalSearchEndpoint,
    IssueSearchEndpoint,
    ## End Search
    # Gpt
    # External
    GPTIntegrationEndpoint,
    ## End Gpt
    # Release Notes
    ReleaseNotesEndpoint,
    ## End Release Notes
    UnsplashEndpoint,
    ## End External
    # Inbox
    InboxViewSet,
    InboxIssueViewSet,
@@ -152,10 +168,32 @@ from plane.api.views import (
    ## End Analytics
    # Notification
    NotificationViewSet,
    UnreadNotificationEndpoint,
    MarkAllReadNotificationViewSet,
    ## End Notification
    # Public Boards
    ProjectDeployBoardViewSet,
    ProjectIssuesPublicEndpoint,
    ProjectDeployBoardPublicSettingsEndpoint,
    IssueReactionPublicViewSet,
    CommentReactionPublicViewSet,
    InboxIssuePublicViewSet,
    IssueVotePublicViewSet,
    WorkspaceProjectDeployBoardEndpoint,
    IssueRetrievePublicEndpoint,
    ## End Public Boards
    ## Exporter
    ExportIssuesEndpoint,
    ## End Exporter
    # Configuration
    ConfigurationEndpoint,
    ## End Configuration
)


#TODO: Delete this file
# This url file has been deprecated; use the apiserver/plane/urls folder to create new urls

urlpatterns = [
    # Social Auth
    path("social-auth/", OauthEndpoint.as_view(), name="oauth"),
@@ -168,6 +206,7 @@ urlpatterns = [
        "magic-generate/", MagicSignInGenerateEndpoint.as_view(), name="magic-generate"
    ),
    path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"),
    path('token/refresh/', TokenRefreshView.as_view(), name='token_refresh'),
    # Email verification
    path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"),
    path(
@@ -194,6 +233,15 @@ urlpatterns = [
        ),
        name="users",
    ),
    path(
        "users/me/settings/",
        UserEndpoint.as_view(
            {
                "get": "retrieve_user_settings",
            }
        ),
        name="users",
    ),
    path(
        "users/me/change-password/",
        ChangePasswordEndpoint.as_view(),
@@ -202,9 +250,18 @@ urlpatterns = [
    path(
        "users/me/onboard/",
        UpdateUserOnBoardedEndpoint.as_view(),
        name="change-password",
        name="user-onboard",
    ),
    path(
        "users/me/tour-completed/",
        UpdateUserTourCompletedEndpoint.as_view(),
        name="user-tour",
    ),
    path(
        "users/workspaces/<str:slug>/activities/",
        UserActivityEndpoint.as_view(),
        name="user-activities",
    ),
    path("users/activities/", UserActivityEndpoint.as_view(), name="user-activities"),
    # user workspaces
    path(
        "users/me/workspaces/",
@@ -320,6 +377,11 @@ urlpatterns = [
        ),
        name="workspace",
    ),
    path(
        "workspaces/<str:slug>/workspace-members/",
        WorkspaceMembersEndpoint.as_view(),
        name="workspace-members",
    ),
    path(
        "workspaces/<str:slug>/teams/",
        TeamMemberViewSet.as_view(
@@ -378,6 +440,36 @@ urlpatterns = [
        ),
        name="workspace-themes",
    ),
    path(
        "workspaces/<str:slug>/user-stats/<uuid:user_id>/",
        WorkspaceUserProfileStatsEndpoint.as_view(),
        name="workspace-user-stats",
    ),
    path(
        "workspaces/<str:slug>/user-activity/<uuid:user_id>/",
        WorkspaceUserActivityEndpoint.as_view(),
        name="workspace-user-activity",
    ),
    path(
        "workspaces/<str:slug>/user-profile/<uuid:user_id>/",
        WorkspaceUserProfileEndpoint.as_view(),
        name="workspace-user-profile-page",
    ),
    path(
        "workspaces/<str:slug>/user-issues/<uuid:user_id>/",
        WorkspaceUserProfileIssuesEndpoint.as_view(),
        name="workspace-user-profile-issues",
    ),
    path(
        "workspaces/<str:slug>/labels/",
        WorkspaceLabelsEndpoint.as_view(),
        name="workspace-labels",
    ),
    path(
        "workspaces/<str:slug>/members/leave/",
        LeaveWorkspaceEndpoint.as_view(),
        name="workspace-labels",
    ),
    ## End Workspaces ##
    # Projects
    path(
@@ -428,6 +520,11 @@ urlpatterns = [
        ),
        name="project",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/project-members/",
        ProjectMemberEndpoint.as_view(),
        name="project",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/members/add/",
        AddMemberToProjectEndpoint.as_view(),
@@ -487,6 +584,16 @@ urlpatterns = [
        ),
        name="project",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/members/leave/",
        LeaveProjectEndpoint.as_view(),
        name="project",
    ),
    path(
        "project-covers/",
        ProjectPublicCoverImagesEndpoint.as_view(),
        name="project-covers",
    ),
    # End Projects
    # States
    path(
@@ -564,9 +671,35 @@ urlpatterns = [
        name="project-view",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/views/<uuid:view_id>/issues/",
        ViewIssuesEndpoint.as_view(),
        name="project-view-issues",
        "workspaces/<str:slug>/views/",
        GlobalViewViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="global-view",
    ),
    path(
        "workspaces/<str:slug>/views/<uuid:pk>/",
        GlobalViewViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="global-view",
    ),
    path(
        "workspaces/<str:slug>/issues/",
        GlobalViewIssuesViewSet.as_view(
            {
                "get": "list",
            }
        ),
        name="global-view-issues",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-views/",
@@ -686,11 +819,6 @@ urlpatterns = [
        ),
        name="project-issue",
    ),
    path(
        "workspaces/<str:slug>/issues/",
        WorkSpaceIssuesEndpoint.as_view(),
        name="workspace-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/",
        LabelViewSet.as_view(
@@ -770,6 +898,11 @@ urlpatterns = [
        IssueAttachmentEndpoint.as_view(),
        name="project-issue-attachments",
    ),
    path(
        "workspaces/<str:slug>/export-issues/",
        ExportIssuesEndpoint.as_view(),
        name="export-issues",
    ),
    ## End Issues
    ## Issue Activity
    path(
@@ -830,28 +963,53 @@ urlpatterns = [
        name="project-issue-subscribers",
    ),
    ## End Issue Subscribers
    ## IssueProperty
    # Issue Reactions
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-properties/",
        IssuePropertyViewSet.as_view(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
        IssueReactionViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-roadmap",
        name="project-issue-reactions",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-properties/<uuid:pk>/",
        IssuePropertyViewSet.as_view(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
        IssueReactionViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-roadmap",
        name="project-issue-reactions",
    ),
    ## End Issue Reactions
    # Comment Reactions
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
        CommentReactionViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-comment-reactions",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
        CommentReactionViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="project-issue-comment-reactions",
    ),
    ## End Comment Reactions
    ## IssueProperty
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-display-properties/",
        IssueUserDisplayPropertyEndpoint.as_view(),
        name="project-issue-display-properties",
    ),
    ## IssueProperty End
    ## Issue Archives
@@ -884,6 +1042,49 @@ urlpatterns = [
        name="project-issue-archive",
    ),
    ## End Issue Archives
    ## Issue Relation
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/",
        IssueRelationViewSet.as_view(
            {
                "post": "create",
            }
        ),
        name="issue-relation",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/<uuid:pk>/",
        IssueRelationViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="issue-relation",
    ),
    ## End Issue Relation
    ## Issue Drafts
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/",
        IssueDraftViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-issue-draft",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/<uuid:pk>/",
        IssueDraftViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-draft",
    ),
    ## End Issue Drafts
    ## File Assets
    path(
        "workspaces/<str:slug>/file-assets/",
@@ -1234,7 +1435,7 @@ urlpatterns = [
    ## End Importer
    # Search
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/search/",
        "workspaces/<str:slug>/search/",
        GlobalSearchEndpoint.as_view(),
        name="global-search",
    ),
@@ -1244,20 +1445,23 @@ urlpatterns = [
        name="project-issue-search",
    ),
    ## End Search
    # Gpt
    # External
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/ai-assistant/",
        GPTIntegrationEndpoint.as_view(),
        name="importer",
    ),
    ## End Gpt
    # Release Notes
    path(
        "release-notes/",
        ReleaseNotesEndpoint.as_view(),
        name="release-notes",
    ),
    ## End Release Notes
    path(
        "unsplash/",
        UnsplashEndpoint.as_view(),
        name="release-notes",
    ),
    ## End External
    # Inbox
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/",
@@ -1377,5 +1581,160 @@ urlpatterns = [
        ),
        name="notifications",
    ),
    path(
        "workspaces/<str:slug>/users/notifications/unread/",
        UnreadNotificationEndpoint.as_view(),
        name="unread-notifications",
    ),
    path(
        "workspaces/<str:slug>/users/notifications/mark-all-read/",
        MarkAllReadNotificationViewSet.as_view(
            {
                "post": "create",
            }
        ),
        name="mark-all-read-notifications",
    ),
    ## End Notification
    # Public Boards
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/",
        ProjectDeployBoardViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="project-deploy-board",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/<uuid:pk>/",
        ProjectDeployBoardViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-deploy-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/settings/",
        ProjectDeployBoardPublicSettingsEndpoint.as_view(),
        name="project-deploy-board-settings",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/",
        ProjectIssuesPublicEndpoint.as_view(),
        name="project-deploy-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/",
        IssueRetrievePublicEndpoint.as_view(),
        name="workspace-project-boards",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
        IssueCommentPublicViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="issue-comments-project-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
        IssueCommentPublicViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="issue-comments-project-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
        IssueReactionPublicViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="issue-reactions-project-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
        IssueReactionPublicViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="issue-reactions-project-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
        CommentReactionPublicViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="comment-reactions-project-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
        CommentReactionPublicViewSet.as_view(
            {
                "delete": "destroy",
            }
        ),
        name="comment-reactions-project-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
        InboxIssuePublicViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="inbox-issue",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
        InboxIssuePublicViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="inbox-issue",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/votes/",
        IssueVotePublicViewSet.as_view(
            {
                "get": "list",
                "post": "create",
                "delete": "destroy",
            }
        ),
        name="issue-vote-project-board",
    ),
    path(
        "public/workspaces/<str:slug>/project-boards/",
        WorkspaceProjectDeployBoardEndpoint.as_view(),
        name="workspace-project-boards",
    ),
    ## End Public Boards
    # Configuration
    path(
        "configs/",
        ConfigurationEndpoint.as_view(),
        name="configuration",
    ),
    ## End Configuration
]
@@ -2,38 +2,38 @@ from .project import (
    ProjectViewSet,
    ProjectMemberViewSet,
    UserProjectInvitationsViewset,
    InviteProjectEndpoint,
    ProjectInvitationsViewset,
    AddTeamToProjectEndpoint,
    ProjectMemberInvitationsViewset,
    ProjectMemberInviteDetailViewSet,
    ProjectIdentifierEndpoint,
    AddMemberToProjectEndpoint,
    ProjectJoinEndpoint,
    ProjectUserViewsEndpoint,
    ProjectMemberUserEndpoint,
    ProjectFavoritesViewSet,
    ProjectDeployBoardViewSet,
    ProjectDeployBoardPublicSettingsEndpoint,
    WorkspaceProjectDeployBoardEndpoint,
    ProjectPublicCoverImagesEndpoint,
)
from .people import (
from .user import (
    UserEndpoint,
    UpdateUserOnBoardedEndpoint,
    UpdateUserTourCompletedEndpoint,
    UserActivityEndpoint,
)

from .oauth import OauthEndpoint

from .base import BaseAPIView, BaseViewSet
from .base import BaseAPIView, BaseViewSet, WebhookMixin

from .workspace import (
    WorkSpaceViewSet,
    UserWorkSpacesEndpoint,
    WorkSpaceAvailabilityCheckEndpoint,
    InviteWorkspaceEndpoint,
    JoinWorkspaceEndpoint,
    WorkspaceJoinEndpoint,
    WorkSpaceMemberViewSet,
    TeamMemberViewSet,
    WorkspaceInvitationsViewset,
    UserWorkspaceInvitationsEndpoint,
    UserWorkspaceInvitationEndpoint,
    UserWorkspaceInvitationsViewSet,
    UserLastProjectWithWorkspaceEndpoint,
    WorkspaceMemberUserEndpoint,
    WorkspaceMemberUserViewsEndpoint,
@@ -41,9 +41,19 @@ from .workspace import (
    UserIssueCompletedGraphEndpoint,
    UserWorkspaceDashboardEndpoint,
    WorkspaceThemeViewSet,
    WorkspaceUserProfileStatsEndpoint,
    WorkspaceUserActivityEndpoint,
    WorkspaceUserProfileEndpoint,
    WorkspaceUserProfileIssuesEndpoint,
    WorkspaceLabelsEndpoint,
)
from .state import StateViewSet
from .view import IssueViewViewSet, ViewIssuesEndpoint, IssueViewFavoriteViewSet
from .view import (
    GlobalViewViewSet,
    GlobalViewIssuesViewSet,
    IssueViewViewSet,
    IssueViewFavoriteViewSet,
)
from .cycle import (
    CycleViewSet,
    CycleIssueViewSet,
@@ -54,10 +64,12 @@ from .cycle import (
from .asset import FileAssetEndpoint, UserAssetsEndpoint
from .issue import (
    IssueViewSet,
    IssueListEndpoint,
    IssueListGroupedEndpoint,
    WorkSpaceIssuesEndpoint,
    IssueActivityEndpoint,
    IssueCommentViewSet,
    IssuePropertyViewSet,
    IssueUserDisplayPropertyEndpoint,
    LabelViewSet,
    BulkDeleteIssuesEndpoint,
    UserWorkSpaceIssues,
@@ -67,6 +79,16 @@ from .issue import (
    IssueAttachmentEndpoint,
    IssueArchiveViewSet,
    IssueSubscriberViewSet,
    IssueCommentPublicViewSet,
    CommentReactionViewSet,
    IssueReactionViewSet,
    IssueReactionPublicViewSet,
    CommentReactionPublicViewSet,
    IssueVotePublicViewSet,
    IssueRelationViewSet,
    IssueRetrievePublicEndpoint,
    ProjectIssuesPublicEndpoint,
    IssueDraftViewSet,
)

from .auth_extended import (
@@ -93,7 +115,7 @@ from .module import (
    ModuleFavoriteViewSet,
)

from .api_token import ApiTokenEndpoint
from .api import ApiTokenEndpoint

from .integration import (
    WorkspaceIntegrationViewSet,
@@ -124,17 +146,14 @@ from .page import (
from .search import GlobalSearchEndpoint, IssueSearchEndpoint


from .gpt import GPTIntegrationEndpoint
from .external import GPTIntegrationEndpoint, ReleaseNotesEndpoint, UnsplashEndpoint

from .estimate import (
    ProjectEstimatePointEndpoint,
    BulkEstimatePointEndpoint,
)


from .release import ReleaseNotesEndpoint

from .inbox import InboxViewSet, InboxIssueViewSet
from .inbox import InboxViewSet, InboxIssueViewSet, InboxIssuePublicViewSet

from .analytic import (
    AnalyticsEndpoint,
@@ -144,4 +163,14 @@ from .analytic import (
    DefaultAnalyticsEndpoint,
)

from .notification import NotificationViewSet
from .notification import (
    NotificationViewSet,
    UnreadNotificationEndpoint,
    MarkAllReadNotificationViewSet,
)

from .exporter import ExportIssuesEndpoint

from .config import ConfigurationEndpoint

from .webhook import WebhookEndpoint, WebhookLogsEndpoint, WebhookSecretRegenerateEndpoint

@@ -1,10 +1,5 @@
# Django imports
from django.db.models import (
    Count,
    Sum,
    F,
    Q
)
from django.db.models import Count, Sum, F, Q
from django.db.models.functions import ExtractMonth

# Third party imports
@@ -28,82 +23,156 @@ class AnalyticsEndpoint(BaseAPIView):
    ]

    def get(self, request, slug):
        try:
            x_axis = request.GET.get("x_axis", False)
            y_axis = request.GET.get("y_axis", False)
        x_axis = request.GET.get("x_axis", False)
        y_axis = request.GET.get("y_axis", False)
        segment = request.GET.get("segment", False)

            if not x_axis or not y_axis:
                return Response(
                    {"error": "x-axis and y-axis dimensions are required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            segment = request.GET.get("segment", False)
            filters = issue_filters(request.GET, "GET")

            queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters)

            total_issues = queryset.count()
            distribution = build_graph_plot(
                queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
            )

            colors = dict()
            if x_axis in ["state__name", "state__group"] or segment in [
                "state__name",
                "state__group",
            ]:
                if x_axis in ["state__name", "state__group"]:
                    key = "name" if x_axis == "state__name" else "group"
                else:
                    key = "name" if segment == "state__name" else "group"

                colors = (
                    State.objects.filter(
                        ~Q(name="Triage"),
                        workspace__slug=slug, project_id__in=filters.get("project__in")
                    ).values(key, "color")
                    if filters.get("project__in", False)
                    else State.objects.filter(~Q(name="Triage"), workspace__slug=slug).values(key, "color")
                )

            if x_axis in ["labels__name"] or segment in ["labels__name"]:
                colors = (
                    Label.objects.filter(
                        workspace__slug=slug, project_id__in=filters.get("project__in")
                    ).values("name", "color")
                    if filters.get("project__in", False)
                    else Label.objects.filter(workspace__slug=slug).values(
                        "name", "color"
                    )
                )

            assignee_details = {}
            if x_axis in ["assignees__email"] or segment in ["assignees__email"]:
                assignee_details = (
                    Issue.issue_objects.filter(workspace__slug=slug, **filters, assignees__avatar__isnull=False)
                    .order_by("assignees__id")
                    .distinct("assignees__id")
                    .values("assignees__avatar", "assignees__email", "assignees__first_name", "assignees__last_name")
                )
        valid_xaxis_segment = [
            "state_id",
            "state__group",
            "labels__id",
            "assignees__id",
            "estimate_point",
            "issue_cycle__cycle_id",
            "issue_module__module_id",
            "priority",
            "start_date",
            "target_date",
            "created_at",
            "completed_at",
        ]

        valid_yaxis = [
            "issue_count",
            "estimate",
        ]

        # Check for x-axis and y-axis as they are required parameters
        if (
            not x_axis
            or not y_axis
            or not x_axis in valid_xaxis_segment
            or not y_axis in valid_yaxis
        ):
            return Response(
                {
                    "total": total_issues,
                    "distribution": distribution,
                    "extras": {"colors": colors, "assignee_details": assignee_details},
                    "error": "x-axis and y-axis dimensions are required and the values should be valid"
                },
                status=status.HTTP_200_OK,
            )

        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # If segment is present it cannot be same as x-axis
        if segment and (segment not in valid_xaxis_segment or x_axis == segment):
            return Response(
                {
                    "error": "Both segment and x axis cannot be same and segment should be valid"
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Additional filters that need to be applied
        filters = issue_filters(request.GET, "GET")

        # Get the issues for the workspace with the additional filters applied
        queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters)

        # Get the total issue count
        total_issues = queryset.count()

        # Build the graph payload
        distribution = build_graph_plot(
            queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
        )

        state_details = {}
        if x_axis in ["state_id"] or segment in ["state_id"]:
            state_details = (
                Issue.issue_objects.filter(
                    workspace__slug=slug,
                    **filters,
                )
                .distinct("state_id")
                .order_by("state_id")
                .values("state_id", "state__name", "state__color")
            )

        label_details = {}
        if x_axis in ["labels__id"] or segment in ["labels__id"]:
            label_details = (
                Issue.objects.filter(
                    workspace__slug=slug, **filters, labels__id__isnull=False
                )
                .distinct("labels__id")
                .order_by("labels__id")
                .values("labels__id", "labels__color", "labels__name")
            )

        assignee_details = {}
        if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
            assignee_details = (
                Issue.issue_objects.filter(
                    workspace__slug=slug, **filters, assignees__avatar__isnull=False
                )
                .order_by("assignees__id")
                .distinct("assignees__id")
                .values(
                    "assignees__avatar",
                    "assignees__display_name",
                    "assignees__first_name",
                    "assignees__last_name",
                    "assignees__id",
                )
            )

        cycle_details = {}
        if x_axis in ["issue_cycle__cycle_id"] or segment in ["issue_cycle__cycle_id"]:
            cycle_details = (
                Issue.issue_objects.filter(
                    workspace__slug=slug,
                    **filters,
                    issue_cycle__cycle_id__isnull=False,
                )
                .distinct("issue_cycle__cycle_id")
                .order_by("issue_cycle__cycle_id")
                .values(
                    "issue_cycle__cycle_id",
                    "issue_cycle__cycle__name",
                )
            )

        module_details = {}
        if x_axis in ["issue_module__module_id"] or segment in [
            "issue_module__module_id"
        ]:
            module_details = (
                Issue.issue_objects.filter(
                    workspace__slug=slug,
                    **filters,
                    issue_module__module_id__isnull=False,
                )
                .distinct("issue_module__module_id")
                .order_by("issue_module__module_id")
                .values(
                    "issue_module__module_id",
                    "issue_module__module__name",
                )
            )

        return Response(
            {
                "total": total_issues,
                "distribution": distribution,
                "extras": {
                    "state_details": state_details,
                    "assignee_details": assignee_details,
                    "label_details": label_details,
                    "cycle_details": cycle_details,
                    "module_details": module_details,
                },
            },
            status=status.HTTP_200_OK,
        )

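The reworked AnalyticsEndpoint validates x_axis, y_axis, and segment up front against the allow-lists instead of relying on a blanket try/except. A sketch of a conforming request (host, auth header, and the exact mount path of the endpoint are assumptions, not shown in this diff):

import requests

resp = requests.get(
    "https://plane.example.com/api/workspaces/acme/analytics/",
    params={
        "x_axis": "priority",       # must be in valid_xaxis_segment
        "y_axis": "issue_count",    # must be in valid_yaxis
        "segment": "state__group",  # optional, and must differ from x_axis
    },
    headers={"Authorization": "Bearer <token>"},
)
resp.raise_for_status()
payload = resp.json()
print(payload["total"], payload["distribution"])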
class AnalyticViewViewset(BaseViewSet):
    permission_classes = [
@@ -128,45 +197,30 @@ class SavedAnalyticEndpoint(BaseAPIView):
    ]

    def get(self, request, slug, analytic_id):
        try:
            analytic_view = AnalyticView.objects.get(
                pk=analytic_id, workspace__slug=slug
            )
        analytic_view = AnalyticView.objects.get(pk=analytic_id, workspace__slug=slug)

            filter = analytic_view.query
            queryset = Issue.issue_objects.filter(**filter)
        filter = analytic_view.query
        queryset = Issue.issue_objects.filter(**filter)

            x_axis = analytic_view.query_dict.get("x_axis", False)
            y_axis = analytic_view.query_dict.get("y_axis", False)
        x_axis = analytic_view.query_dict.get("x_axis", False)
        y_axis = analytic_view.query_dict.get("y_axis", False)

        if not x_axis or not y_axis:
            return Response(
                {"error": "x-axis and y-axis dimensions are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

            segment = request.GET.get("segment", False)
            distribution = build_graph_plot(
                queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
            )
            total_issues = queryset.count()
            if not x_axis or not y_axis:
                return Response(
                    {"total": total_issues, "distribution": distribution},
                    status=status.HTTP_200_OK,
                )

        except AnalyticView.DoesNotExist:
            return Response(
                {"error": "Analytic View Does not exist"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                {"error": "x-axis and y-axis dimensions are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        segment = request.GET.get("segment", False)
        distribution = build_graph_plot(
            queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
        )
        total_issues = queryset.count()
        return Response(
            {"total": total_issues, "distribution": distribution},
            status=status.HTTP_200_OK,
        )


class ExportAnalyticsEndpoint(BaseAPIView):
    permission_classes = [
@@ -174,33 +228,64 @@ class ExportAnalyticsEndpoint(BaseAPIView):
    ]

    def post(self, request, slug):
        try:
            x_axis = request.data.get("x_axis", False)
            y_axis = request.data.get("y_axis", False)
        x_axis = request.data.get("x_axis", False)
        y_axis = request.data.get("y_axis", False)
        segment = request.data.get("segment", False)

            if not x_axis or not y_axis:
                return Response(
                    {"error": "x-axis and y-axis dimensions are required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
        valid_xaxis_segment = [
            "state_id",
            "state__group",
            "labels__id",
            "assignees__id",
            "estimate_point",
            "issue_cycle__cycle_id",
            "issue_module__module_id",
            "priority",
            "start_date",
            "target_date",
            "created_at",
            "completed_at",
        ]

            analytic_export_task.delay(
                email=request.user.email, data=request.data, slug=slug
            )
        valid_yaxis = [
            "issue_count",
            "estimate",
        ]

        # Check for x-axis and y-axis as they are required parameters
        if (
            not x_axis
            or not y_axis
            or not x_axis in valid_xaxis_segment
            or not y_axis in valid_yaxis
        ):
            return Response(
                {
                    "message": f"Once the export is ready it will be emailed to you at {str(request.user.email)}"
                    "error": "x-axis and y-axis dimensions are required and the values should be valid"
                },
                status=status.HTTP_200_OK,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # If segment is present it cannot be same as x-axis
        if segment and (segment not in valid_xaxis_segment or x_axis == segment):
            return Response(
                {
                    "error": "Both segment and x axis cannot be same and segment should be valid"
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        analytic_export_task.delay(
            email=request.user.email, data=request.data, slug=slug
        )

        return Response(
            {
                "message": f"Once the export is ready it will be emailed to you at {str(request.user.email)}"
            },
            status=status.HTTP_200_OK,
        )


class DefaultAnalyticsEndpoint(BaseAPIView):
    permission_classes = [
@@ -208,90 +293,92 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
    ]

    def get(self, request, slug):
        try:
            filters = issue_filters(request.GET, "GET")
        filters = issue_filters(request.GET, "GET")
        base_issues = Issue.issue_objects.filter(workspace__slug=slug, **filters)

            queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters)
        total_issues = base_issues.count()

            total_issues = queryset.count()
        state_groups = base_issues.annotate(state_group=F("state__group"))

            total_issues_classified = (
                queryset.annotate(state_group=F("state__group"))
                .values("state_group")
                .annotate(state_count=Count("state_group"))
                .order_by("state_group")
            )
        total_issues_classified = (
            state_groups.values("state_group")
            .annotate(state_count=Count("state_group"))
            .order_by("state_group")
        )

            open_issues = queryset.filter(
                state__group__in=["backlog", "unstarted", "started"]
            ).count()
        open_issues_groups = ["backlog", "unstarted", "started"]
        open_issues_queryset = state_groups.filter(state__group__in=open_issues_groups)

            open_issues_classified = (
                queryset.filter(state__group__in=["backlog", "unstarted", "started"])
                .annotate(state_group=F("state__group"))
                .values("state_group")
                .annotate(state_count=Count("state_group"))
                .order_by("state_group")
            )
        open_issues = open_issues_queryset.count()
        open_issues_classified = (
            open_issues_queryset.values("state_group")
            .annotate(state_count=Count("state_group"))
            .order_by("state_group")
        )

            issue_completed_month_wise = (
                queryset.filter(completed_at__isnull=False)
                .annotate(month=ExtractMonth("completed_at"))
                .values("month")
                .annotate(count=Count("*"))
                .order_by("month")
            )
            most_issue_created_user = (
                queryset.exclude(created_by=None)
                .values("created_by__first_name", "created_by__last_name", "created_by__avatar", "created_by__email")
                .annotate(count=Count("id"))
                .order_by("-count")
            )[:5]
        issue_completed_month_wise = (
            base_issues.filter(completed_at__isnull=False)
            .annotate(month=ExtractMonth("completed_at"))
            .values("month")
            .annotate(count=Count("*"))
            .order_by("month")
        )

            most_issue_closed_user = (
                queryset.filter(completed_at__isnull=False, assignees__isnull=False)
                .values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__email")
                .annotate(count=Count("id"))
                .order_by("-count")
            )[:5]
        user_details = [
            "created_by__first_name",
            "created_by__last_name",
            "created_by__avatar",
            "created_by__display_name",
            "created_by__id",
        ]

            pending_issue_user = (
                queryset.filter(completed_at__isnull=True)
                .values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__email")
                .annotate(count=Count("id"))
                .order_by("-count")
            )
        most_issue_created_user = (
            base_issues.exclude(created_by=None)
            .values(*user_details)
            .annotate(count=Count("id"))
            .order_by("-count")[:5]
        )

            open_estimate_sum = (
                queryset.filter(
                    state__group__in=["backlog", "unstarted", "started"]
                ).aggregate(open_estimate_sum=Sum("estimate_point"))
            )["open_estimate_sum"]
            print(open_estimate_sum)

            total_estimate_sum = queryset.aggregate(
                total_estimate_sum=Sum("estimate_point")
            )["total_estimate_sum"]
        user_assignee_details = [
            "assignees__first_name",
            "assignees__last_name",
            "assignees__avatar",
            "assignees__display_name",
            "assignees__id",
        ]

            return Response(
                {
                    "total_issues": total_issues,
                    "total_issues_classified": total_issues_classified,
                    "open_issues": open_issues,
                    "open_issues_classified": open_issues_classified,
                    "issue_completed_month_wise": issue_completed_month_wise,
                    "most_issue_created_user": most_issue_created_user,
                    "most_issue_closed_user": most_issue_closed_user,
                    "pending_issue_user": pending_issue_user,
                    "open_estimate_sum": open_estimate_sum,
                    "total_estimate_sum": total_estimate_sum,
                },
                status=status.HTTP_200_OK,
            )
        most_issue_closed_user = (
            base_issues.filter(completed_at__isnull=False)
            .exclude(assignees=None)
            .values(*user_assignee_details)
            .annotate(count=Count("id"))
            .order_by("-count")[:5]
        )

        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        pending_issue_user = (
            base_issues.filter(completed_at__isnull=True)
            .values(*user_assignee_details)
            .annotate(count=Count("id"))
            .order_by("-count")
        )

        open_estimate_sum = open_issues_queryset.aggregate(sum=Sum("estimate_point"))[
            "sum"
        ]
        total_estimate_sum = base_issues.aggregate(sum=Sum("estimate_point"))["sum"]

        return Response(
            {
                "total_issues": total_issues,
                "total_issues_classified": total_issues_classified,
                "open_issues": open_issues,
                "open_issues_classified": open_issues_classified,
                "issue_completed_month_wise": issue_completed_month_wise,
                "most_issue_created_user": most_issue_created_user,
                "most_issue_closed_user": most_issue_closed_user,
                "pending_issue_user": pending_issue_user,
                "open_estimate_sum": open_estimate_sum,
|
||||
"total_estimate_sum": total_estimate_sum,
|
||||
},
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
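
The refactor above builds `base_issues` once and derives every aggregate from it, instead of re-filtering `Issue.issue_objects` for each metric. A minimal sketch of that queryset-reuse pattern, assuming a hypothetical Django model `Ticket` with `state_group` and `estimate_point` fields (not part of this diff):

# Minimal sketch of the queryset-reuse pattern; `Ticket` is hypothetical.
from django.db.models import Count, Sum

def ticket_analytics(ticket_model, filters):
    # Build the filtered queryset once. Querysets are lazy, so each
    # aggregation below issues its own SQL from this shared base.
    base = ticket_model.objects.filter(**filters)
    open_qs = base.filter(state_group__in=["backlog", "unstarted", "started"])

    return {
        "total": base.count(),
        "open": open_qs.count(),
        "by_state": list(
            base.values("state_group").annotate(count=Count("state_group"))
        ),
        "open_estimate_sum": open_qs.aggregate(sum=Sum("estimate_point"))["sum"],
    }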
78  apiserver/plane/api/views/api.py  Normal file
@@ -0,0 +1,78 @@
# Python import
from uuid import uuid4

# Third party
from rest_framework.response import Response
from rest_framework import status

# Module import
from .base import BaseAPIView
from plane.db.models import APIToken, Workspace
from plane.api.serializers import APITokenSerializer, APITokenReadSerializer
from plane.api.permissions import WorkspaceOwnerPermission


class ApiTokenEndpoint(BaseAPIView):
    permission_classes = [
        WorkspaceOwnerPermission,
    ]

    def post(self, request, slug):
        label = request.data.get("label", str(uuid4().hex))
        description = request.data.get("description", "")
        workspace = Workspace.objects.get(slug=slug)
        expired_at = request.data.get("expired_at", None)

        # Check the user type
        user_type = 1 if request.user.is_bot else 0

        api_token = APIToken.objects.create(
            label=label,
            description=description,
            user=request.user,
            workspace=workspace,
            user_type=user_type,
            expired_at=expired_at,
        )

        serializer = APITokenSerializer(api_token)
        # Token will be only visible while creating
        return Response(
            serializer.data,
            status=status.HTTP_201_CREATED,
        )

    def get(self, request, slug, pk=None):
        if pk == None:
            api_tokens = APIToken.objects.filter(
                user=request.user, workspace__slug=slug
            )
            serializer = APITokenReadSerializer(api_tokens, many=True)
            return Response(serializer.data, status=status.HTTP_200_OK)
        else:
            api_tokens = APIToken.objects.get(
                user=request.user, workspace__slug=slug, pk=pk
            )
            serializer = APITokenReadSerializer(api_tokens)
            return Response(serializer.data, status=status.HTTP_200_OK)

    def delete(self, request, slug, pk):
        api_token = APIToken.objects.get(
            workspace__slug=slug,
            user=request.user,
            pk=pk,
        )
        api_token.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)

    def patch(self, request, slug, pk):
        api_token = APIToken.objects.get(
            workspace__slug=slug,
            user=request.user,
            pk=pk,
        )
        serializer = APITokenSerializer(api_token, data=request.data, partial=True)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
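
A hedged sketch of exercising the new ApiTokenEndpoint with DRF's test client. The URL path and the `workspace_owner` fixture are assumptions; adjust them to the project's actual router and test setup:

# Exercising the token endpoints; route and fixture names are assumptions.
from rest_framework.test import APIClient

client = APIClient()
client.force_authenticate(user=workspace_owner)  # hypothetical owner fixture

# Create: the full serialized token is only returned from this POST.
created = client.post(
    "/api/workspaces/my-workspace/api-tokens/",  # assumed route
    {"label": "ci-token", "description": "Used by CI"},
    format="json",
)

# List: the read serializer omits the secret token value.
listed = client.get("/api/workspaces/my-workspace/api-tokens/")

# Revoke by primary key.
client.delete(f"/api/workspaces/my-workspace/api-tokens/{created.data['id']}/")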
@@ -1,70 +0,0 @@
# Python import
from uuid import uuid4

# Third party
from rest_framework.response import Response
from rest_framework import status
from sentry_sdk import capture_exception

# Module import
from .base import BaseAPIView
from plane.db.models import APIToken
from plane.api.serializers import APITokenSerializer


class ApiTokenEndpoint(BaseAPIView):
    def post(self, request):
        try:
            label = request.data.get("label", str(uuid4().hex))
            workspace = request.data.get("workspace", False)

            if not workspace:
                return Response(
                    {"error": "Workspace is required"}, status=status.HTTP_200_OK
                )

            api_token = APIToken.objects.create(
                label=label, user=request.user, workspace_id=workspace
            )

            serializer = APITokenSerializer(api_token)
            # Token will be only vissible while creating
            return Response(
                {"api_token": serializer.data, "token": api_token.token},
                status=status.HTTP_201_CREATED,
            )

        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

    def get(self, request):
        try:
            api_tokens = APIToken.objects.filter(user=request.user)
            serializer = APITokenSerializer(api_tokens, many=True)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

    def delete(self, request, pk):
        try:
            api_token = APIToken.objects.get(pk=pk)
            api_token.delete()
            return Response(status=status.HTTP_204_NO_CONTENT)
        except APIToken.DoesNotExist:
            return Response(
                {"error": "Token does not exists"}, status=status.HTTP_400_BAD_REQUEST
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )
@@ -20,90 +20,56 @@ class FileAssetEndpoint(BaseAPIView):
    def get(self, request, workspace_id, asset_key):
        asset_key = str(workspace_id) + "/" + asset_key
        files = FileAsset.objects.filter(asset=asset_key)
        serializer = FileAssetSerializer(files, context={"request": request}, many=True)
        return Response(serializer.data)
        if files.exists():
            serializer = FileAssetSerializer(files, context={"request": request}, many=True)
            return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK)
        else:
            return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)

    def post(self, request, slug):
        try:
            serializer = FileAssetSerializer(data=request.data)
            if serializer.is_valid():
                # Get the workspace
                workspace = Workspace.objects.get(slug=slug)
                serializer.save(workspace_id=workspace.id)
                return Response(serializer.data, status=status.HTTP_201_CREATED)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        except Workspace.DoesNotExist:
            return Response({"error": "Workspace does not exist"}, status=status.HTTP_400_BAD_REQUEST)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        serializer = FileAssetSerializer(data=request.data)
        if serializer.is_valid():
            # Get the workspace
            workspace = Workspace.objects.get(slug=slug)
            serializer.save(workspace_id=workspace.id)
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def delete(self, request, workspace_id, asset_key):
        try:
            asset_key = str(workspace_id) + "/" + asset_key
            file_asset = FileAsset.objects.get(asset=asset_key)
            # Delete the file from storage
            file_asset.asset.delete(save=False)
            # Delete the file object
            file_asset.delete()
            return Response(status=status.HTTP_204_NO_CONTENT)
        except FileAsset.DoesNotExist:
            return Response(
                {"error": "File Asset doesn't exist"}, status=status.HTTP_404_NOT_FOUND
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        asset_key = str(workspace_id) + "/" + asset_key
        file_asset = FileAsset.objects.get(asset=asset_key)
        # Delete the file from storage
        file_asset.asset.delete(save=False)
        # Delete the file object
        file_asset.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)


class UserAssetsEndpoint(BaseAPIView):
    parser_classes = (MultiPartParser, FormParser)

    def get(self, request, asset_key):
        try:
            files = FileAsset.objects.filter(asset=asset_key, created_by=request.user)
            serializer = FileAssetSerializer(files, context={"request": request})
            return Response(serializer.data)
        except FileAsset.DoesNotExist:
            return Response(
                {"error": "File Asset does not exist"}, status=status.HTTP_404_NOT_FOUND
            )
        if files.exists():
            serializer = FileAssetSerializer(files, context={"request": request})
            return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK)
        else:
            return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)

    def post(self, request):
        try:
            serializer = FileAssetSerializer(data=request.data)
            if serializer.is_valid():
                serializer.save()
                return Response(serializer.data, status=status.HTTP_201_CREATED)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

    def delete(self, request, asset_key):
        try:
            file_asset = FileAsset.objects.get(asset=asset_key, created_by=request.user)
            # Delete the file from storage
            file_asset.asset.delete(save=False)
            # Delete the file object
            file_asset.delete()
            return Response(status=status.HTTP_204_NO_CONTENT)
        except FileAsset.DoesNotExist:
            return Response(
                {"error": "File Asset doesn't exist"}, status=status.HTTP_404_NOT_FOUND
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )
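
The delete handlers above perform a two-step removal: first the stored blob, then the database row. A short sketch of why `delete(save=False)` is used, assuming any model with a FileField named `asset` (as FileAsset has):

# `save=False` removes the file from the storage backend without issuing
# an UPDATE for a row that is about to be deleted anyway.
def remove_asset(file_asset):
    file_asset.asset.delete(save=False)  # drop the blob from storage
    file_asset.delete()                  # then drop the database row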
@@ -9,7 +9,6 @@ from django.utils.encoding import (
    DjangoUnicodeDecodeError,
)
from django.utils.http import urlsafe_base64_decode, urlsafe_base64_encode
from django.contrib.sites.shortcuts import get_current_site
from django.conf import settings

## Third Party Imports
@@ -22,7 +21,7 @@ from sentry_sdk import capture_exception

## Module imports
from . import BaseAPIView
from plane.api.serializers.people import (
from plane.api.serializers import (
    ChangePasswordSerializer,
    ResetPasswordSerializer,
)
@@ -34,7 +33,7 @@ from plane.bgtasks.forgot_password_task import forgot_password
class RequestEmailVerificationEndpoint(BaseAPIView):
    def get(self, request):
        token = RefreshToken.for_user(request.user).access_token
        current_site = settings.WEB_URL
        current_site = request.META.get('HTTP_ORIGIN')
        email_verification.delay(
            request.user.first_name, request.user.email, token, current_site
        )
@@ -56,11 +55,11 @@ class VerifyEmailEndpoint(BaseAPIView):
            return Response(
                {"email": "Successfully activated"}, status=status.HTTP_200_OK
            )
        except jwt.ExpiredSignatureError as indentifier:
        except jwt.ExpiredSignatureError as _indentifier:
            return Response(
                {"email": "Activation expired"}, status=status.HTTP_400_BAD_REQUEST
            )
        except jwt.exceptions.DecodeError as indentifier:
        except jwt.exceptions.DecodeError as _indentifier:
            return Response(
                {"email": "Invalid token"}, status=status.HTTP_400_BAD_REQUEST
            )
@@ -77,7 +76,7 @@ class ForgotPasswordEndpoint(BaseAPIView):
            uidb64 = urlsafe_base64_encode(smart_bytes(user.id))
            token = PasswordResetTokenGenerator().make_token(user)

            current_site = settings.WEB_URL
            current_site = request.META.get('HTTP_ORIGIN')

            forgot_password.delay(
                user.first_name, user.email, uidb64, token, current_site
@@ -128,32 +127,25 @@ class ResetPasswordEndpoint(BaseAPIView):

class ChangePasswordEndpoint(BaseAPIView):
    def post(self, request):
        try:
            serializer = ChangePasswordSerializer(data=request.data)

            user = User.objects.get(pk=request.user.id)
            if serializer.is_valid():
                # Check old password
                if not user.object.check_password(serializer.data.get("old_password")):
                    return Response(
                        {"old_password": ["Wrong password."]},
                        status=status.HTTP_400_BAD_REQUEST,
                    )
                # set_password also hashes the password that the user will get
                self.object.set_password(serializer.data.get("new_password"))
                self.object.save()
                response = {
                    "status": "success",
                    "code": status.HTTP_200_OK,
                    "message": "Password updated successfully",
                }

                return Response(response)

            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )
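
Note that the ChangePasswordEndpoint body carried over by this diff still references `user.object.check_password(...)` and `self.object.set_password(...)`; neither attribute exists on a Django user or an APIView, so both lines would raise AttributeError at runtime. A hedged sketch of the conventional flow, using only the standard Django user API:

# Conventional change-password flow; a sketch, not the diff's exact code.
def change_password(user, old_password, new_password):
    if not user.check_password(old_password):
        return False
    user.set_password(new_password)  # set_password hashes before storing
    user.save(update_fields=["password"])
    return True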
@@ -4,7 +4,7 @@ import random
import string
import json
import requests

from requests.exceptions import RequestException
# Django imports
from django.utils import timezone
from django.core.exceptions import ValidationError
@@ -22,8 +22,13 @@ from sentry_sdk import capture_exception, capture_message

# Module imports
from . import BaseAPIView
from plane.db.models import User
from plane.api.serializers import UserSerializer
from plane.db.models import (
    User,
    WorkspaceMemberInvite,
    WorkspaceMember,
    ProjectMemberInvite,
    ProjectMember,
)
from plane.settings.redis import redis_instance
from plane.bgtasks.magic_link_code_task import magic_link

@@ -40,63 +45,109 @@ class SignUpEndpoint(BaseAPIView):
    permission_classes = (AllowAny,)

    def post(self, request):
        if not settings.ENABLE_SIGNUP:
            return Response(
                {
                    "error": "New account creation is disabled. Please contact your site administrator"
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        email = request.data.get("email", False)
        password = request.data.get("password", False)

        ## Raise exception if any of the above are missing
        if not email or not password:
            return Response(
                {"error": "Both email and password are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        email = email.strip().lower()

        try:
            if not settings.ENABLE_SIGNUP:
                return Response(
                    {
                        "error": "New account creation is disabled. Please contact your site administrator"
                    },
                    status=status.HTTP_400_BAD_REQUEST,
            validate_email(email)
        except ValidationError as e:
            return Response(
                {"error": "Please provide a valid email address."},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Check if the user already exists
        if User.objects.filter(email=email).exists():
            return Response(
                {"error": "User with this email already exists"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        user = User.objects.create(email=email, username=uuid.uuid4().hex)
        user.set_password(password)

        # settings last actives for the user
        user.last_active = timezone.now()
        user.last_login_time = timezone.now()
        user.last_login_ip = request.META.get("REMOTE_ADDR")
        user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
        user.token_updated_at = timezone.now()
        user.save()

        # Check if user has any accepted invites for workspace and add them to workspace
        workspace_member_invites = WorkspaceMemberInvite.objects.filter(
            email=user.email, accepted=True
        )

        WorkspaceMember.objects.bulk_create(
            [
                WorkspaceMember(
                    workspace_id=workspace_member_invite.workspace_id,
                    member=user,
                    role=workspace_member_invite.role,
                )
                for workspace_member_invite in workspace_member_invites
            ],
            ignore_conflicts=True,
        )

            email = request.data.get("email", False)
            password = request.data.get("password", False)
        # Check if user has any project invites
        project_member_invites = ProjectMemberInvite.objects.filter(
            email=user.email, accepted=True
        )

            ## Raise exception if any of the above are missing
            if not email or not password:
                return Response(
                    {"error": "Both email and password are required"},
                    status=status.HTTP_400_BAD_REQUEST,
        # Add user to workspace
        WorkspaceMember.objects.bulk_create(
            [
                WorkspaceMember(
                    workspace_id=project_member_invite.workspace_id,
                    role=project_member_invite.role
                    if project_member_invite.role in [5, 10, 15]
                    else 15,
                    member=user,
                    created_by_id=project_member_invite.created_by_id,
                )
                for project_member_invite in project_member_invites
            ],
            ignore_conflicts=True,
        )

            email = email.strip().lower()

            try:
                validate_email(email)
            except ValidationError as e:
                return Response(
                    {"error": "Please provide a valid email address."},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            # Check if the user already exists
            if User.objects.filter(email=email).exists():
                return Response(
                    {"error": "User with this email already exists"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            user = User.objects.create(email=email, username=uuid.uuid4().hex)
            user.set_password(password)

            # settings last actives for the user
            user.last_active = timezone.now()
            user.last_login_time = timezone.now()
            user.last_login_ip = request.META.get("REMOTE_ADDR")
            user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
            user.token_updated_at = timezone.now()
            user.save()

            serialized_user = UserSerializer(user).data

            access_token, refresh_token = get_tokens_for_user(user)

            data = {
                "access_token": access_token,
                "refresh_token": refresh_token,
                "user": serialized_user,
            }
        # Now add the users to project
        ProjectMember.objects.bulk_create(
            [
                ProjectMember(
                    workspace_id=project_member_invite.workspace_id,
                    role=project_member_invite.role
                    if project_member_invite.role in [5, 10, 15]
                    else 15,
                    member=user,
                    created_by_id=project_member_invite.created_by_id,
                ) for project_member_invite in project_member_invites
            ],
            ignore_conflicts=True,
        )
        # Delete all the invites
        workspace_member_invites.delete()
        project_member_invites.delete()

        try:
            # Send Analytics
            if settings.ANALYTICS_BASE_API:
                _ = requests.post(
@@ -118,79 +169,133 @@ class SignUpEndpoint(BaseAPIView):
                        "event_type": "SIGN_UP",
                    },
                )

            return Response(data, status=status.HTTP_200_OK)

        except Exception as e:
        except RequestException as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        access_token, refresh_token = get_tokens_for_user(user)

        data = {
            "access_token": access_token,
            "refresh_token": refresh_token,
        }
        return Response(data, status=status.HTTP_200_OK)


class SignInEndpoint(BaseAPIView):
    permission_classes = (AllowAny,)

    def post(self, request):
        email = request.data.get("email", False)
        password = request.data.get("password", False)

        ## Raise exception if any of the above are missing
        if not email or not password:
            return Response(
                {"error": "Both email and password are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        email = email.strip().lower()

        try:
            email = request.data.get("email", False)
            password = request.data.get("password", False)
            validate_email(email)
        except ValidationError as e:
            return Response(
                {"error": "Please provide a valid email address."},
                status=status.HTTP_400_BAD_REQUEST,
            )

            ## Raise exception if any of the above are missing
            if not email or not password:
                return Response(
                    {"error": "Both email and password are required"},
                    status=status.HTTP_400_BAD_REQUEST,
        user = User.objects.filter(email=email).first()

        if user is None:
            return Response(
                {
                    "error": "Sorry, we could not find a user with the provided credentials. Please try again."
                },
                status=status.HTTP_403_FORBIDDEN,
            )

        # Sign up Process
        if not user.check_password(password):
            return Response(
                {
                    "error": "Sorry, we could not find a user with the provided credentials. Please try again."
                },
                status=status.HTTP_403_FORBIDDEN,
            )
        if not user.is_active:
            return Response(
                {
                    "error": "Your account has been deactivated. Please contact your site administrator."
                },
                status=status.HTTP_403_FORBIDDEN,
            )

        # settings last active for the user
        user.last_active = timezone.now()
        user.last_login_time = timezone.now()
        user.last_login_ip = request.META.get("REMOTE_ADDR")
        user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
        user.token_updated_at = timezone.now()
        user.save()

        # Check if user has any accepted invites for workspace and add them to workspace
        workspace_member_invites = WorkspaceMemberInvite.objects.filter(
            email=user.email, accepted=True
        )

        WorkspaceMember.objects.bulk_create(
            [
                WorkspaceMember(
                    workspace_id=workspace_member_invite.workspace_id,
                    member=user,
                    role=workspace_member_invite.role,
                )
                for workspace_member_invite in workspace_member_invites
            ],
            ignore_conflicts=True,
        )

            email = email.strip().lower()
        # Check if user has any project invites
        project_member_invites = ProjectMemberInvite.objects.filter(
            email=user.email, accepted=True
        )

            try:
                validate_email(email)
            except ValidationError as e:
                return Response(
                    {"error": "Please provide a valid email address."},
                    status=status.HTTP_400_BAD_REQUEST,
        # Add user to workspace
        WorkspaceMember.objects.bulk_create(
            [
                WorkspaceMember(
                    workspace_id=project_member_invite.workspace_id,
                    role=project_member_invite.role
                    if project_member_invite.role in [5, 10, 15]
                    else 15,
                    member=user,
                    created_by_id=project_member_invite.created_by_id,
                )
                for project_member_invite in project_member_invites
            ],
            ignore_conflicts=True,
        )

            user = User.objects.filter(email=email).first()
        # Now add the users to project
        ProjectMember.objects.bulk_create(
            [
                ProjectMember(
                    workspace_id=project_member_invite.workspace_id,
                    role=project_member_invite.role
                    if project_member_invite.role in [5, 10, 15]
                    else 15,
                    member=user,
                    created_by_id=project_member_invite.created_by_id,
                ) for project_member_invite in project_member_invites
            ],
            ignore_conflicts=True,
        )

            if user is None:
                return Response(
                    {
                        "error": "Sorry, we could not find a user with the provided credentials. Please try again."
                    },
                    status=status.HTTP_403_FORBIDDEN,
                )

            # Sign up Process
            if not user.check_password(password):
                return Response(
                    {
                        "error": "Sorry, we could not find a user with the provided credentials. Please try again."
                    },
                    status=status.HTTP_403_FORBIDDEN,
                )
            if not user.is_active:
                return Response(
                    {
                        "error": "Your account has been deactivated. Please contact your site administrator."
                    },
                    status=status.HTTP_403_FORBIDDEN,
                )

            serialized_user = UserSerializer(user).data

            # settings last active for the user
            user.last_active = timezone.now()
            user.last_login_time = timezone.now()
            user.last_login_ip = request.META.get("REMOTE_ADDR")
            user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
            user.token_updated_at = timezone.now()
            user.save()

            access_token, refresh_token = get_tokens_for_user(user)
        # Delete all the invites
        workspace_member_invites.delete()
        project_member_invites.delete()
        try:
            # Send Analytics
            if settings.ANALYTICS_BASE_API:
                _ = requests.post(
@@ -212,57 +317,39 @@ class SignInEndpoint(BaseAPIView):
                        "event_type": "SIGN_IN",
                    },
                )
            data = {
                "access_token": access_token,
                "refresh_token": refresh_token,
                "user": serialized_user,
            }

            return Response(data, status=status.HTTP_200_OK)

        except Exception as e:
        except RequestException as e:
            capture_exception(e)
            return Response(
                {
                    "error": "Something went wrong. Please try again later or contact the support team."
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        data = {
            "access_token": access_token,
            "refresh_token": refresh_token,
        }
        access_token, refresh_token = get_tokens_for_user(user)
        return Response(data, status=status.HTTP_200_OK)


class SignOutEndpoint(BaseAPIView):
    def post(self, request):
        try:
            refresh_token = request.data.get("refresh_token", False)

            if not refresh_token:
                capture_message("No refresh token provided")
                return Response(
                    {
                        "error": "Something went wrong. Please try again later or contact the support team."
                    },
                    status=status.HTTP_400_BAD_REQUEST,
                )

            user = User.objects.get(pk=request.user.id)

            user.last_logout_time = timezone.now()
            user.last_logout_ip = request.META.get("REMOTE_ADDR")

            user.save()

            token = RefreshToken(refresh_token)
            token.blacklist()
            return Response({"message": "success"}, status=status.HTTP_200_OK)
        except Exception as e:
            capture_exception(e)
        if not refresh_token:
            capture_message("No refresh token provided")
            return Response(
                {
                    "error": "Something went wrong. Please try again later or contact the support team."
                },
                {"error": "No refresh token provided"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        user = User.objects.get(pk=request.user.id)

        user.last_logout_time = timezone.now()
        user.last_logout_ip = request.META.get("REMOTE_ADDR")

        user.save()

        token = RefreshToken(refresh_token)
        token.blacklist()
        return Response({"message": "success"}, status=status.HTTP_200_OK)
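
Sign-out above revokes the session by blacklisting the SimpleJWT refresh token. A minimal sketch of that step, assuming djangorestframework-simplejwt with its token_blacklist app installed (which the endpoint requires):

# Blacklisting a refresh token with SimpleJWT.
from rest_framework_simplejwt.tokens import RefreshToken

def blacklist_refresh_token(raw_token: str) -> None:
    token = RefreshToken(raw_token)  # validates signature and expiry
    token.blacklist()                # persists the token to the blacklist table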
class MagicSignInGenerateEndpoint(BaseAPIView):
    permission_classes = [
@@ -270,72 +357,63 @@ class MagicSignInGenerateEndpoint(BaseAPIView):
    ]

    def post(self, request):
        try:
            email = request.data.get("email", False)

            if not email:
                return Response(
                    {"error": "Please provide a valid email address"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            # Clean up
            email = email.strip().lower()
            validate_email(email)

            ## Generate a random token
            token = (
                "".join(random.choices(string.ascii_lowercase, k=4))
                + "-"
                + "".join(random.choices(string.ascii_lowercase, k=4))
                + "-"
                + "".join(random.choices(string.ascii_lowercase, k=4))
            )

            ri = redis_instance()

            key = "magic_" + str(email)

            # Check if the key already exists in python
            if ri.exists(key):
                data = json.loads(ri.get(key))

                current_attempt = data["current_attempt"] + 1

                if data["current_attempt"] > 2:
                    return Response(
                        {"error": "Please provide a valid email address"},
                        {"error": "Max attempts exhausted. Please try again later."},
                        status=status.HTTP_400_BAD_REQUEST,
                    )

                validate_email(email)
                value = {
                    "current_attempt": current_attempt,
                    "email": email,
                    "token": token,
                }
                expiry = 600

        ## Generate a random token
        token = (
            "".join(random.choices(string.ascii_lowercase + string.digits, k=4))
            + "-"
            + "".join(random.choices(string.ascii_lowercase + string.digits, k=4))
            + "-"
            + "".join(random.choices(string.ascii_lowercase + string.digits, k=4))
        )
                ri.set(key, json.dumps(value), ex=expiry)

        ri = redis_instance()
            else:
                value = {"current_attempt": 0, "email": email, "token": token}
                expiry = 600

        key = "magic_" + str(email)
                ri.set(key, json.dumps(value), ex=expiry)

        # Check if the key already exists in python
        if ri.exists(key):
            data = json.loads(ri.get(key))

            current_attempt = data["current_attempt"] + 1
            current_site = request.META.get('HTTP_ORIGIN')
            magic_link.delay(email, key, token, current_site)

            if data["current_attempt"] > 2:
                return Response(
                    {"error": "Max attempts exhausted. Please try again later."},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            value = {
                "current_attempt": current_attempt,
                "email": email,
                "token": token,
            }
            expiry = 600

            ri.set(key, json.dumps(value), ex=expiry)

        else:
            value = {"current_attempt": 0, "email": email, "token": token}
            expiry = 600

            ri.set(key, json.dumps(value), ex=expiry)

            current_site = settings.WEB_URL
            magic_link.delay(email, key, token, current_site)

            return Response({"key": key}, status=status.HTTP_200_OK)
        except ValidationError:
            return Response(
                {"error": "Please provide a valid email address."},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        return Response({"key": key}, status=status.HTTP_200_OK)


class MagicSignInEndpoint(BaseAPIView):
@@ -344,27 +422,34 @@ class MagicSignInEndpoint(BaseAPIView):
    ]

    def post(self, request):
        try:
            user_token = request.data.get("token", "").strip().lower()
            key = request.data.get("key", False)
        user_token = request.data.get("token", "").strip()
        key = request.data.get("key", False).strip().lower()

        if not key or user_token == "":
            return Response(
                {"error": "User token and key are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        ri = redis_instance()

        if ri.exists(key):
            data = json.loads(ri.get(key))

            token = data["token"]
            email = data["email"]

            if str(token) == str(user_token):
                if User.objects.filter(email=email).exists():
                    user = User.objects.get(email=email)
                    if not user.is_active:
                        return Response(
                            {
                                "error": "Your account has been deactivated. Please contact your site administrator."
                            },
                            status=status.HTTP_403_FORBIDDEN,
                        )
                    try:
                        # Send event to Jitsu for tracking
                        if settings.ANALYTICS_BASE_API:
                            _ = requests.post(
@@ -381,20 +466,21 @@ class MagicSignInEndpoint(BaseAPIView):
                                    "user": {"email": email, "id": str(user.id)},
                                    "device_ctx": {
                                        "ip": request.META.get("REMOTE_ADDR"),
                                        "user_agent": request.META.get(
                                            "HTTP_USER_AGENT"
                                        ),
                                        "user_agent": request.META.get("HTTP_USER_AGENT"),
                                    },
                                    "event_type": "SIGN_IN",
                                },
                            )
                else:
                    user = User.objects.create(
                        email=email,
                        username=uuid.uuid4().hex,
                        password=make_password(uuid.uuid4().hex),
                        is_password_autoset=True,
                    )
                    except RequestException as e:
                        capture_exception(e)
                else:
                    user = User.objects.create(
                        email=email,
                        username=uuid.uuid4().hex,
                        password=make_password(uuid.uuid4().hex),
                        is_password_autoset=True,
                    )
                    try:
                        # Send event to Jitsu for tracking
                        if settings.ANALYTICS_BASE_API:
                            _ = requests.post(
@@ -411,46 +497,94 @@ class MagicSignInEndpoint(BaseAPIView):
                                    "user": {"email": email, "id": str(user.id)},
                                    "device_ctx": {
                                        "ip": request.META.get("REMOTE_ADDR"),
                                        "user_agent": request.META.get(
                                            "HTTP_USER_AGENT"
                                        ),
                                        "user_agent": request.META.get("HTTP_USER_AGENT"),
                                    },
                                    "event_type": "SIGN_UP",
                                },
                            )
                    except RequestException as e:
                        capture_exception(e)

                user.last_active = timezone.now()
                user.last_login_time = timezone.now()
                user.last_login_ip = request.META.get("REMOTE_ADDR")
                user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
                user.token_updated_at = timezone.now()
                user.save()
                serialized_user = UserSerializer(user).data

                access_token, refresh_token = get_tokens_for_user(user)
                data = {
                    "access_token": access_token,
                    "refresh_token": refresh_token,
                    "user": serialized_user,
                }
                # Check if user has any accepted invites for workspace and add them to workspace
                workspace_member_invites = WorkspaceMemberInvite.objects.filter(
                    email=user.email, accepted=True
                )

                return Response(data, status=status.HTTP_200_OK)
                WorkspaceMember.objects.bulk_create(
                    [
                        WorkspaceMember(
                            workspace_id=workspace_member_invite.workspace_id,
                            member=user,
                            role=workspace_member_invite.role,
                        )
                        for workspace_member_invite in workspace_member_invites
                    ],
                    ignore_conflicts=True,
                )

            else:
                return Response(
                    {"error": "Your login code was incorrect. Please try again."},
                    status=status.HTTP_400_BAD_REQUEST,
                )
                # Check if user has any project invites
                project_member_invites = ProjectMemberInvite.objects.filter(
                    email=user.email, accepted=True
                )

                # Add user to workspace
                WorkspaceMember.objects.bulk_create(
                    [
                        WorkspaceMember(
                            workspace_id=project_member_invite.workspace_id,
                            role=project_member_invite.role
                            if project_member_invite.role in [5, 10, 15]
                            else 15,
                            member=user,
                            created_by_id=project_member_invite.created_by_id,
                        )
                        for project_member_invite in project_member_invites
                    ],
                    ignore_conflicts=True,
                )

                # Now add the users to project
                ProjectMember.objects.bulk_create(
                    [
                        ProjectMember(
                            workspace_id=project_member_invite.workspace_id,
                            role=project_member_invite.role
                            if project_member_invite.role in [5, 10, 15]
                            else 15,
                            member=user,
                            created_by_id=project_member_invite.created_by_id,
                        ) for project_member_invite in project_member_invites
                    ],
                    ignore_conflicts=True,
                )

                # Delete all the invites
                workspace_member_invites.delete()
                project_member_invites.delete()

                access_token, refresh_token = get_tokens_for_user(user)
                data = {
                    "access_token": access_token,
                    "refresh_token": refresh_token,
                }

                return Response(data, status=status.HTTP_200_OK)

            else:
                return Response(
                    {"error": "The magic code/link has expired please try again"},
                    {"error": "Your login code was incorrect. Please try again."},
                    status=status.HTTP_400_BAD_REQUEST,
                )

        except Exception as e:
            capture_exception(e)
        else:
            return Response(
                {"error": "Something went wrong please try again later"},
                {"error": "The magic code/link has expired please try again"},
                status=status.HTTP_400_BAD_REQUEST,
            )
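
The magic-link generator above stores `{current_attempt, email, token}` under a `magic_<email>` key with a 600-second expiry and refuses further sends after three attempts. A hedged sketch of roughly that scheme, using a generic redis-py client in place of `plane.settings.redis.redis_instance`:

# Attempt-counting sketch; redis-py client and threshold mirror the view.
import json
import redis

r = redis.Redis()

def record_magic_attempt(email: str, token: str) -> bool:
    key = "magic_" + email
    if r.exists(key):
        data = json.loads(r.get(key))
        if data["current_attempt"] > 2:
            return False  # caller should answer "max attempts exhausted"
        value = {"current_attempt": data["current_attempt"] + 1,
                 "email": email, "token": token}
    else:
        value = {"current_attempt": 0, "email": email, "token": token}
    r.set(key, json.dumps(value), ex=600)  # same 600 s expiry as the view
    return True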
@@ -1,25 +1,68 @@
# Python imports
import zoneinfo
import json

# Django imports
from django.urls import resolve
from django.conf import settings
from django.utils import timezone
from django.db import IntegrityError
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.core.serializers.json import DjangoJSONEncoder

# Third part imports
from rest_framework import status
from rest_framework.viewsets import ModelViewSet
from rest_framework.response import Response
from rest_framework.exceptions import APIException
from rest_framework.views import APIView
from rest_framework.filters import SearchFilter
from rest_framework.permissions import IsAuthenticated
from rest_framework.exceptions import NotFound
from sentry_sdk import capture_exception
from django_filters.rest_framework import DjangoFilterBackend

# Module imports
from plane.db.models import Workspace, Project
from plane.utils.paginator import BasePaginator
from plane.bgtasks.webhook_task import send_webhook


class BaseViewSet(ModelViewSet, BasePaginator):
class TimezoneMixin:
    """
    This enables timezone conversion according
    to the user set timezone
    """

    def initial(self, request, *args, **kwargs):
        super().initial(request, *args, **kwargs)
        if request.user.is_authenticated:
            timezone.activate(zoneinfo.ZoneInfo(request.user.user_timezone))
        else:
            timezone.deactivate()


class WebhookMixin:
    webhook_event = None

    def finalize_response(self, request, response, *args, **kwargs):
        response = super().finalize_response(request, response, *args, **kwargs)

        if (
            self.webhook_event
            and self.request.method in ["POST", "PATCH", "DELETE"]
            and response.status_code in [200, 201, 204]
        ):
            send_webhook.delay(
                event=self.webhook_event,
                event_data=json.dumps(response.data, cls=DjangoJSONEncoder),
                action=self.request.method,
                slug=self.workspace_slug,
            )

        return response


class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
    model = None

    permission_classes = [
@@ -42,16 +85,61 @@ class BaseViewSet(ModelViewSet, BasePaginator):
            capture_exception(e)
            raise APIException("Please check the view", status.HTTP_400_BAD_REQUEST)

    def handle_exception(self, exc):
        """
        Handle any exception that occurs, by returning an appropriate response,
        or re-raising the error.
        """
        try:
            response = super().handle_exception(exc)
            return response
        except Exception as e:
            if isinstance(e, IntegrityError):
                return Response(
                    {"error": "The payload is not valid"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            if isinstance(e, ValidationError):
                return Response(
                    {"error": "Please provide valid detail"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            if isinstance(e, ObjectDoesNotExist):
                model_name = str(exc).split(" matching query does not exist.")[0]
                return Response(
                    {"error": f"{model_name} does not exist."},
                    status=status.HTTP_404_NOT_FOUND,
                )

            if isinstance(e, KeyError):
                capture_exception(e)
                return Response(
                    {"error": f"key {e} does not exist"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            print(e) if settings.DEBUG else print("Server Error")
            capture_exception(e)
            return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    def dispatch(self, request, *args, **kwargs):
        response = super().dispatch(request, *args, **kwargs)
        try:
            response = super().dispatch(request, *args, **kwargs)

            if settings.DEBUG:
                from django.db import connection

                print(
                    f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
                )
            return response

            return response
        except Exception as exc:
            response = self.handle_exception(exc)
            return exc

    @property
    def workspace_slug(self):
@@ -67,8 +155,7 @@ class BaseViewSet(ModelViewSet, BasePaginator):
        return self.kwargs.get("pk", None)


class BaseAPIView(APIView, BasePaginator):

class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
    permission_classes = [
        IsAuthenticated,
    ]
@@ -87,16 +174,58 @@ class BaseAPIView(APIView, BasePaginator):
        queryset = backend().filter_queryset(self.request, queryset, self)
        return queryset

    def handle_exception(self, exc):
        """
        Handle any exception that occurs, by returning an appropriate response,
        or re-raising the error.
        """
        try:
            response = super().handle_exception(exc)
            return response
        except Exception as e:
            if isinstance(e, IntegrityError):
                return Response(
                    {"error": "The payload is not valid"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            if isinstance(e, ValidationError):
                return Response(
                    {"error": "Please provide valid detail"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            if isinstance(e, ObjectDoesNotExist):
                model_name = str(exc).split(" matching query does not exist.")[0]
                return Response(
                    {"error": f"{model_name} does not exist."},
                    status=status.HTTP_404_NOT_FOUND,
                )

            if isinstance(e, KeyError):
                return Response({"error": f"key {e} does not exist"}, status=status.HTTP_400_BAD_REQUEST)

            if settings.DEBUG:
                print(e)
            capture_exception(e)
            return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    def dispatch(self, request, *args, **kwargs):
        response = super().dispatch(request, *args, **kwargs)
        try:
            response = super().dispatch(request, *args, **kwargs)

            if settings.DEBUG:
                from django.db import connection

                print(
                    f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
                )
            return response
            return response

        except Exception as exc:
            response = self.handle_exception(exc)
            return exc

    @property
    def workspace_slug(self):
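
Viewsets opt in to the new WebhookMixin by declaring a `webhook_event`; `finalize_response` then fans successful writes out through the `send_webhook` Celery task. A sketch of an opting-in viewset; `ProjectViewSet` and the `"project"` event name are assumptions, while `Project` and `BaseViewSet` come from this file:

# Hypothetical viewset wiring the mixin up; only `webhook_event` is required.
class ProjectViewSet(WebhookMixin, BaseViewSet):
    model = Project
    webhook_event = "project"  # fires send_webhook on 2xx POST/PATCH/DELETE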
37  apiserver/plane/api/views/config.py  Normal file
@@ -0,0 +1,37 @@
# Python imports
import os

# Django imports
from django.conf import settings

# Third party imports
from rest_framework.permissions import AllowAny
from rest_framework import status
from rest_framework.response import Response
from sentry_sdk import capture_exception

# Module imports
from .base import BaseAPIView


class ConfigurationEndpoint(BaseAPIView):
    permission_classes = [
        AllowAny,
    ]

    def get(self, request):
        data = {}
        data["google_client_id"] = os.environ.get("GOOGLE_CLIENT_ID", None)
        data["github_client_id"] = os.environ.get("GITHUB_CLIENT_ID", None)
        data["github_app_name"] = os.environ.get("GITHUB_APP_NAME", None)
        data["magic_login"] = (
            bool(settings.EMAIL_HOST_USER) and bool(settings.EMAIL_HOST_PASSWORD)
        ) and os.environ.get("ENABLE_MAGIC_LINK_LOGIN", "0") == "1"
        data["email_password_login"] = (
            os.environ.get("ENABLE_EMAIL_PASSWORD", "0") == "1"
        )
        data["slack_client_id"] = os.environ.get("SLACK_CLIENT_ID", None)
        data["posthog_api_key"] = os.environ.get("POSTHOG_API_KEY", None)
        data["posthog_host"] = os.environ.get("POSTHOG_HOST", None)
        data["has_unsplash_configured"] = bool(settings.UNSPLASH_ACCESS_KEY)
        return Response(data, status=status.HTTP_200_OK)
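
The ConfigurationEndpoint is unauthenticated (`AllowAny`), so a client can read the feature flags before any login flow. A hedged sketch of consuming it; the base URL and route are assumptions:

# Reading the public config; instance URL and path are assumed.
import requests

config = requests.get("https://plane.example.com/api/configs/").json()
if config.get("magic_login"):
    print("magic-link login is enabled")
if config.get("email_password_login"):
    print("email/password login is enabled")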
File diff suppressed because it is too large
@@ -1,6 +1,3 @@
|
||||
# Django imports
|
||||
from django.db import IntegrityError
|
||||
|
||||
# Third party imports
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
@@ -23,7 +20,6 @@ class ProjectEstimatePointEndpoint(BaseAPIView):
|
||||
]
|
||||
|
||||
def get(self, request, slug, project_id):
|
||||
try:
|
||||
project = Project.objects.get(workspace__slug=slug, pk=project_id)
|
||||
if project.estimate_id is not None:
|
||||
estimate_points = EstimatePoint.objects.filter(
|
||||
@@ -34,12 +30,6 @@ class ProjectEstimatePointEndpoint(BaseAPIView):
|
||||
serializer = EstimatePointSerializer(estimate_points, many=True)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return Response([], status=status.HTTP_200_OK)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
|
||||
class BulkEstimatePointEndpoint(BaseViewSet):
|
||||
@@ -50,204 +40,139 @@ class BulkEstimatePointEndpoint(BaseViewSet):
|
||||
serializer_class = EstimateSerializer
|
||||
|
||||
def list(self, request, slug, project_id):
|
||||
try:
|
||||
estimates = Estimate.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id
|
||||
).prefetch_related("points").select_related("workspace", "project")
|
||||
serializer = EstimateReadSerializer(estimates, many=True)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
estimates = Estimate.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id
|
||||
).prefetch_related("points").select_related("workspace", "project")
|
||||
serializer = EstimateReadSerializer(estimates, many=True)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
    def create(self, request, slug, project_id):
        if not request.data.get("estimate", False):
            return Response(
                {"error": "Estimate is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        estimate_points = request.data.get("estimate_points", [])

        if not len(estimate_points) or len(estimate_points) > 8:
            return Response(
                {"error": "Estimate points are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        estimate_serializer = EstimateSerializer(data=request.data.get("estimate"))
        if not estimate_serializer.is_valid():
            return Response(
                estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST
            )
        estimate = estimate_serializer.save(project_id=project_id)

        estimate_points = EstimatePoint.objects.bulk_create(
            [
                EstimatePoint(
                    estimate=estimate,
                    key=estimate_point.get("key", 0),
                    value=estimate_point.get("value", ""),
                    description=estimate_point.get("description", ""),
                    project_id=project_id,
                    workspace_id=estimate.workspace_id,
                    created_by=request.user,
                    updated_by=request.user,
                )
                for estimate_point in estimate_points
            ],
            batch_size=10,
            ignore_conflicts=True,
        )

        estimate_point_serializer = EstimatePointSerializer(
            estimate_points, many=True
        )

        return Response(
            {
                "estimate": estimate_serializer.data,
                "estimate_points": estimate_point_serializer.data,
            },
            status=status.HTTP_200_OK,
        )
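For orientation, a minimal sketch of the request body this create endpoint expects; the field names are taken from the request.data reads above, while the concrete values are illustrative assumptions:

    # Illustrative payload only; shape inferred from the create() validations above.
    payload = {
        "estimate": {"name": "Fibonacci", "description": "Story-point scale"},
        # between 1 and 8 points, each with a key and a display value
        "estimate_points": [
            {"key": i, "value": str(v), "description": ""}
            for i, v in enumerate([1, 2, 3, 5, 8])
        ],
    }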
    def retrieve(self, request, slug, project_id, estimate_id):
        estimate = Estimate.objects.get(
            pk=estimate_id, workspace__slug=slug, project_id=project_id
        )
        serializer = EstimateReadSerializer(estimate)
        return Response(
            serializer.data,
            status=status.HTTP_200_OK,
        )
    def partial_update(self, request, slug, project_id, estimate_id):
        if not request.data.get("estimate", False):
            return Response(
                {"error": "Estimate is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        if not len(request.data.get("estimate_points", [])):
            return Response(
                {"error": "Estimate points are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        estimate = Estimate.objects.get(pk=estimate_id)

        estimate_serializer = EstimateSerializer(
            estimate, data=request.data.get("estimate"), partial=True
        )
        if not estimate_serializer.is_valid():
            return Response(
                estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST
            )

        estimate = estimate_serializer.save()

        estimate_points_data = request.data.get("estimate_points", [])

        estimate_points = EstimatePoint.objects.filter(
            pk__in=[
                estimate_point.get("id") for estimate_point in estimate_points_data
            ],
            workspace__slug=slug,
            project_id=project_id,
            estimate_id=estimate_id,
        )

        updated_estimate_points = []
        for estimate_point in estimate_points:
            # Find the data for that estimate point
            estimate_point_data = [
                point
                for point in estimate_points_data
                if point.get("id") == str(estimate_point.id)
            ]
            if len(estimate_point_data):
                estimate_point.value = estimate_point_data[0].get(
                    "value", estimate_point.value
                )
                updated_estimate_points.append(estimate_point)

        EstimatePoint.objects.bulk_update(
            updated_estimate_points, ["value"], batch_size=10,
        )

        estimate_point_serializer = EstimatePointSerializer(estimate_points, many=True)
        return Response(
            {
                "estimate": estimate_serializer.data,
                "estimate_points": estimate_point_serializer.data,
            },
            status=status.HTTP_200_OK,
        )
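The update loop above matches incoming point payloads to stored rows by stringified id before a narrow bulk write; a standalone sketch of that pattern with hypothetical data:

    # Hypothetical incoming data; same matching strategy as partial_update() above.
    incoming = [{"id": "3f9a", "value": "13"}]
    updated = []
    for point in estimate_points:  # queryset of EstimatePoint rows
        match = [p for p in incoming if p.get("id") == str(point.id)]
        if match:
            point.value = match[0].get("value", point.value)
            updated.append(point)
    # Only the "value" column is written, 10 rows per query.
    EstimatePoint.objects.bulk_update(updated, ["value"], batch_size=10)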
    def destroy(self, request, slug, project_id, estimate_id):
        estimate = Estimate.objects.get(
            pk=estimate_id, workspace__slug=slug, project_id=project_id
        )
        estimate.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
81  apiserver/plane/api/views/exporter.py  Normal file
@@ -0,0 +1,81 @@
# Third Party imports
from rest_framework.response import Response
from rest_framework import status
from sentry_sdk import capture_exception

# Module imports
from . import BaseAPIView
from plane.api.permissions import WorkSpaceAdminPermission
from plane.bgtasks.export_task import issue_export_task
from plane.db.models import Project, ExporterHistory, Workspace

from plane.api.serializers import ExporterHistorySerializer


class ExportIssuesEndpoint(BaseAPIView):
    permission_classes = [
        WorkSpaceAdminPermission,
    ]
    model = ExporterHistory
    serializer_class = ExporterHistorySerializer

    def post(self, request, slug):
        # Get the workspace
        workspace = Workspace.objects.get(slug=slug)

        provider = request.data.get("provider", False)
        multiple = request.data.get("multiple", False)
        project_ids = request.data.get("project", [])

        if provider in ["csv", "xlsx", "json"]:
            if not project_ids:
                project_ids = Project.objects.filter(
                    workspace__slug=slug
                ).values_list("id", flat=True)
                project_ids = [str(project_id) for project_id in project_ids]

            exporter = ExporterHistory.objects.create(
                workspace=workspace,
                project=project_ids,
                initiated_by=request.user,
                provider=provider,
            )

            issue_export_task.delay(
                provider=exporter.provider,
                workspace_id=workspace.id,
                project_ids=project_ids,
                token_id=exporter.token,
                multiple=multiple,
                slug=slug,
            )
            return Response(
                {
                    "message": "Once the export is ready you will be able to download it"
                },
                status=status.HTTP_200_OK,
            )
        else:
            return Response(
                {"error": f"Provider '{provider}' not found."},
                status=status.HTTP_400_BAD_REQUEST,
            )

    def get(self, request, slug):
        exporter_history = ExporterHistory.objects.filter(
            workspace__slug=slug
        ).select_related("workspace", "initiated_by")

        if request.GET.get("per_page", False) and request.GET.get("cursor", False):
            return self.paginate(
                request=request,
                queryset=exporter_history,
                on_results=lambda exporter_history: ExporterHistorySerializer(
                    exporter_history, many=True
                ).data,
            )
        else:
            return Response(
                {"error": "per_page and cursor are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )
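A sketch of how a client might drive this endpoint; the host and route are assumptions, while the body fields mirror the request.data reads in post() above:

    import requests

    resp = requests.post(
        "https://plane.example.com/api/workspaces/my-workspace/export-issues/",  # hypothetical route
        headers={"Authorization": "Bearer <api-token>"},
        # provider must be csv, xlsx or json; an empty project list means all projects
        json={"provider": "csv", "multiple": False, "project": []},
    )
    print(resp.json())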
92  apiserver/plane/api/views/external.py  Normal file
@@ -0,0 +1,92 @@
# Python imports
import requests

# Third party imports
import openai
from rest_framework.response import Response
from rest_framework import status
from rest_framework.permissions import AllowAny
from sentry_sdk import capture_exception

# Django imports
from django.conf import settings

# Module imports
from .base import BaseAPIView
from plane.api.permissions import ProjectEntityPermission
from plane.db.models import Workspace, Project
from plane.api.serializers import ProjectLiteSerializer, WorkspaceLiteSerializer
from plane.utils.integrations.github import get_release_notes


class GPTIntegrationEndpoint(BaseAPIView):
    permission_classes = [
        ProjectEntityPermission,
    ]

    def post(self, request, slug, project_id):
        if not settings.OPENAI_API_KEY or not settings.GPT_ENGINE:
            return Response(
                {"error": "OpenAI API key and engine are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        prompt = request.data.get("prompt", False)
        task = request.data.get("task", False)

        if not task:
            return Response(
                {"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST
            )

        final_text = task + "\n" + prompt

        openai.api_key = settings.OPENAI_API_KEY
        response = openai.ChatCompletion.create(
            model=settings.GPT_ENGINE,
            messages=[{"role": "user", "content": final_text}],
            temperature=0.7,
            max_tokens=1024,
        )

        workspace = Workspace.objects.get(slug=slug)
        project = Project.objects.get(pk=project_id)

        text = response.choices[0].message.content.strip()
        text_html = text.replace("\n", "<br/>")
        return Response(
            {
                "response": text,
                "response_html": text_html,
                "project_detail": ProjectLiteSerializer(project).data,
                "workspace_detail": WorkspaceLiteSerializer(workspace).data,
            },
            status=status.HTTP_200_OK,
        )


class ReleaseNotesEndpoint(BaseAPIView):
    def get(self, request):
        release_notes = get_release_notes()
        return Response(release_notes, status=status.HTTP_200_OK)


class UnsplashEndpoint(BaseAPIView):

    def get(self, request):
        query = request.GET.get("query", False)
        page = request.GET.get("page", 1)
        per_page = request.GET.get("per_page", 20)

        url = (
            f"https://api.unsplash.com/search/photos/?client_id={settings.UNSPLASH_ACCESS_KEY}&query={query}&page={page}&per_page={per_page}"
            if query
            else f"https://api.unsplash.com/photos/?client_id={settings.UNSPLASH_ACCESS_KEY}&page={page}&per_page={per_page}"
        )

        headers = {
            "Content-Type": "application/json",
        }

        resp = requests.get(url=url, headers=headers)
        return Response(resp.json(), status=resp.status_code)
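The new file above moves from the legacy text-completion API to the chat API; a condensed sketch of the two call shapes, both taken from this diff (openai v0.x SDK style):

    # Old call (removed file below): plain prompt in, .text out.
    response = openai.Completion.create(
        model=settings.GPT_ENGINE, prompt=final_text, temperature=0.7, max_tokens=1024
    )
    text = response.choices[0].text.strip()

    # New call (file above): chat messages in, .message.content out.
    response = openai.ChatCompletion.create(
        model=settings.GPT_ENGINE,
        messages=[{"role": "user", "content": final_text}],
        temperature=0.7,
        max_tokens=1024,
    )
    text = response.choices[0].message.content.strip()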
@@ -1,101 +0,0 @@
# Python imports
import requests

# Third party imports
from rest_framework.response import Response
from rest_framework import status
import openai
from sentry_sdk import capture_exception

# Django imports
from django.conf import settings

# Module imports
from .base import BaseAPIView
from plane.api.permissions import ProjectEntityPermission
from plane.db.models import Workspace, Project
from plane.api.serializers import ProjectLiteSerializer, WorkspaceLiteSerializer


class GPTIntegrationEndpoint(BaseAPIView):
    permission_classes = [
        ProjectEntityPermission,
    ]

    def post(self, request, slug, project_id):
        try:
            if not settings.OPENAI_API_KEY or not settings.GPT_ENGINE:
                return Response(
                    {"error": "OpenAI API key and engine are required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            count = 0

            # If logger is enabled check for request limit
            if settings.LOGGER_BASE_URL:
                try:
                    headers = {
                        "Content-Type": "application/json",
                    }

                    response = requests.post(
                        settings.LOGGER_BASE_URL,
                        json={"user_id": str(request.user.id)},
                        headers=headers,
                    )
                    count = response.json().get("count", 0)
                    if not response.json().get("success", False):
                        return Response(
                            {
                                "error": "You have surpassed the monthly limit for AI assistance"
                            },
                            status=status.HTTP_429_TOO_MANY_REQUESTS,
                        )
                except Exception as e:
                    capture_exception(e)

            prompt = request.data.get("prompt", False)
            task = request.data.get("task", False)

            if not task:
                return Response(
                    {"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST
                )

            final_text = task + "\n" + prompt

            openai.api_key = settings.OPENAI_API_KEY
            response = openai.Completion.create(
                model=settings.GPT_ENGINE,
                prompt=final_text,
                temperature=0.7,
                max_tokens=1024,
            )

            workspace = Workspace.objects.get(slug=slug)
            project = Project.objects.get(pk=project_id)

            text = response.choices[0].text.strip()
            text_html = text.replace("\n", "<br/>")
            return Response(
                {
                    "response": text,
                    "response_html": text_html,
                    "count": count,
                    "project_detail": ProjectLiteSerializer(project).data,
                    "workspace_detail": WorkspaceLiteSerializer(workspace).data,
                },
                status=status.HTTP_200_OK,
            )
        except (Workspace.DoesNotExist, Project.DoesNotExist) as e:
            return Response(
                {"error": "Workspace or Project does not exist"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong, please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )
@@ -39,564 +39,488 @@ from plane.utils.integrations.github import get_github_repo_details
from plane.utils.importers.jira import jira_project_issue_summary
from plane.bgtasks.importer_task import service_importer
from plane.utils.html_processor import strip_tags
from plane.api.permissions import WorkSpaceAdminPermission
class ServiceIssueImportSummaryEndpoint(BaseAPIView):

    def get(self, request, slug, service):
        if service == "github":
            owner = request.GET.get("owner", False)
            repo = request.GET.get("repo", False)

            if not owner or not repo:
                return Response(
                    {"error": "Owner and repo are required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            workspace_integration = WorkspaceIntegration.objects.get(
                integration__provider="github", workspace__slug=slug
            )

            access_tokens_url = workspace_integration.metadata.get(
                "access_tokens_url", False
            )

            if not access_tokens_url:
                return Response(
                    {
                        "error": "There was an error during the installation of the GitHub app. To resolve this issue, we recommend reinstalling the GitHub app."
                    },
                    status=status.HTTP_400_BAD_REQUEST,
                )

            issue_count, labels, collaborators = get_github_repo_details(
                access_tokens_url, owner, repo
            )
            return Response(
                {
                    "issue_count": issue_count,
                    "labels": labels,
                    "collaborators": collaborators,
                },
                status=status.HTTP_200_OK,
            )

        if service == "jira":
            # Check for all the keys
            params = {
                "project_key": "Project key is required",
                "api_token": "API token is required",
                "email": "Email is required",
                "cloud_hostname": "Cloud hostname is required",
            }

            for key, error_message in params.items():
                if not request.GET.get(key, False):
                    return Response(
                        {"error": error_message}, status=status.HTTP_400_BAD_REQUEST
                    )

            project_key = request.GET.get("project_key", "")
            api_token = request.GET.get("api_token", "")
            email = request.GET.get("email", "")
            cloud_hostname = request.GET.get("cloud_hostname", "")

            response = jira_project_issue_summary(
                email, api_token, project_key, cloud_hostname
            )
            if "error" in response:
                return Response(response, status=status.HTTP_400_BAD_REQUEST)
            else:
                return Response(
                    response,
                    status=status.HTTP_200_OK,
                )
        return Response(
            {"error": "Service not supported yet"},
            status=status.HTTP_400_BAD_REQUEST,
        )
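A sketch of the query string the Jira branch validates; the parameter names come from the params dict above, the values are placeholders:

    # Hypothetical values; each key is checked one by one in get() above.
    params = {
        "project_key": "PROJ",
        "api_token": "<jira-api-token>",
        "email": "user@example.com",
        "cloud_hostname": "yourteam.atlassian.net",
    }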
class ImportServiceEndpoint(BaseAPIView):
    permission_classes = [
        WorkSpaceAdminPermission,
    ]

    def post(self, request, slug, service):
        project_id = request.data.get("project_id", False)

        if not project_id:
            return Response(
                {"error": "Project ID is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        workspace = Workspace.objects.get(slug=slug)

        if service == "github":
            data = request.data.get("data", False)
            metadata = request.data.get("metadata", False)
            config = request.data.get("config", False)
            if not data or not metadata or not config:
                return Response(
                    {"error": "Data, config and metadata are required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            api_token = APIToken.objects.filter(
                user=request.user, workspace=workspace
            ).first()
            if api_token is None:
                api_token = APIToken.objects.create(
                    user=request.user,
                    label="Importer",
                    workspace=workspace,
                )

            importer = Importer.objects.create(
                service=service,
                project_id=project_id,
                status="queued",
                initiated_by=request.user,
                data=data,
                metadata=metadata,
                token=api_token,
                config=config,
                created_by=request.user,
                updated_by=request.user,
            )

            service_importer.delay(service, importer.id)
            serializer = ImporterSerializer(importer)
            return Response(serializer.data, status=status.HTTP_201_CREATED)

        if service == "jira":
            data = request.data.get("data", False)
            metadata = request.data.get("metadata", False)
            config = request.data.get("config", False)
            if not data or not metadata:
                return Response(
                    {"error": "Data, config and metadata are required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            api_token = APIToken.objects.filter(
                user=request.user, workspace=workspace
            ).first()
            if api_token is None:
                api_token = APIToken.objects.create(
                    user=request.user,
                    label="Importer",
                    workspace=workspace,
                )

            importer = Importer.objects.create(
                service=service,
                project_id=project_id,
                status="queued",
                initiated_by=request.user,
                data=data,
                metadata=metadata,
                token=api_token,
                config=config,
                created_by=request.user,
                updated_by=request.user,
            )

            service_importer.delay(service, importer.id)
            serializer = ImporterSerializer(importer)
            return Response(serializer.data, status=status.HTTP_201_CREATED)

        return Response(
            {"error": "Service not supported yet"},
            status=status.HTTP_400_BAD_REQUEST,
        )
    def get(self, request, slug):
        imports = (
            Importer.objects.filter(workspace__slug=slug)
            .order_by("-created_at")
            .select_related("initiated_by", "project", "workspace")
        )
        serializer = ImporterSerializer(imports, many=True)
        return Response(serializer.data)

    def delete(self, request, slug, service, pk):
        importer = Importer.objects.get(
            pk=pk, service=service, workspace__slug=slug
        )

        if importer.imported_data is not None:
            # Delete all imported Issues
            imported_issues = importer.imported_data.get("issues", [])
            Issue.issue_objects.filter(id__in=imported_issues).delete()

            # Delete all imported Labels
            imported_labels = importer.imported_data.get("labels", [])
            Label.objects.filter(id__in=imported_labels).delete()

            if importer.service == "jira":
                imported_modules = importer.imported_data.get("modules", [])
                Module.objects.filter(id__in=imported_modules).delete()
        importer.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)

    def patch(self, request, slug, service, pk):
        importer = Importer.objects.get(
            pk=pk, service=service, workspace__slug=slug
        )
        serializer = ImporterSerializer(importer, data=request.data, partial=True)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
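Both service branches in post() above hand-roll a get-or-create for the importer token; a sketch of the equivalent Django idiom, assuming no extra lookup fields are needed:

    # defaults= only applies when a new row is created, which matches the
    # filter(...).first() / create(...) pair used above.
    api_token, _ = APIToken.objects.get_or_create(
        user=request.user,
        workspace=workspace,
        defaults={"label": "Importer"},
    )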
class UpdateServiceImportStatusEndpoint(BaseAPIView):
    def post(self, request, slug, project_id, service, importer_id):
        importer = Importer.objects.get(
            pk=importer_id,
            workspace__slug=slug,
            project_id=project_id,
            service=service,
        )
        importer.status = request.data.get("status", "processing")
        importer.save()
        return Response(status=status.HTTP_200_OK)
class BulkImportIssuesEndpoint(BaseAPIView):
    def post(self, request, slug, project_id, service):
        # Get the project
        project = Project.objects.get(pk=project_id, workspace__slug=slug)

        # Get the default state
        default_state = State.objects.filter(
            ~Q(name="Triage"), project_id=project_id, default=True
        ).first()
        # if there is no default state assign any random state
        if default_state is None:
            default_state = State.objects.filter(
                ~Q(name="Triage"), project_id=project_id
            ).first()

        # Get the maximum sequence_id
        last_id = IssueSequence.objects.filter(project_id=project_id).aggregate(
            largest=Max("sequence")
        )["largest"]

        last_id = 1 if last_id is None else last_id + 1

        # Get the maximum sort order
        largest_sort_order = Issue.objects.filter(
            project_id=project_id, state=default_state
        ).aggregate(largest=Max("sort_order"))["largest"]

        largest_sort_order = (
            65535 if largest_sort_order is None else largest_sort_order + 10000
        )

        # Get the issues_data
        issues_data = request.data.get("issues_data", [])

        if not len(issues_data):
            return Response(
                {"error": "Issue data is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Issues
        bulk_issues = []
        for issue_data in issues_data:
            bulk_issues.append(
                Issue(
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    state_id=issue_data.get("state")
                    if issue_data.get("state", False)
                    else default_state.id,
                    name=issue_data.get("name", "Issue Created through Bulk"),
                    description_html=issue_data.get("description_html", "<p></p>"),
                    description_stripped=(
                        None
                        if (
                            issue_data.get("description_html") == ""
                            or issue_data.get("description_html") is None
                        )
                        else strip_tags(issue_data.get("description_html"))
                    ),
                    sequence_id=last_id,
                    sort_order=largest_sort_order,
                    start_date=issue_data.get("start_date", None),
                    target_date=issue_data.get("target_date", None),
                    priority=issue_data.get("priority", "none"),
                    created_by=request.user,
                )
            )

            largest_sort_order = largest_sort_order + 10000
            last_id = last_id + 1

        issues = Issue.objects.bulk_create(
            bulk_issues,
            batch_size=100,
            ignore_conflicts=True,
        )

        # Sequences
        _ = IssueSequence.objects.bulk_create(
            [
                IssueSequence(
                    issue=issue,
                    sequence=issue.sequence_id,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                )
                for issue in issues
            ],
            batch_size=100,
        )

        # Attach Labels
        bulk_issue_labels = []
        for issue, issue_data in zip(issues, issues_data):
            labels_list = issue_data.get("labels_list", [])
            bulk_issue_labels = bulk_issue_labels + [
                IssueLabel(
                    issue=issue,
                    label_id=label_id,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for label_id in labels_list
            ]

        _ = IssueLabel.objects.bulk_create(
            bulk_issue_labels, batch_size=100, ignore_conflicts=True
        )

        # Attach Assignees
        bulk_issue_assignees = []
        for issue, issue_data in zip(issues, issues_data):
            assignees_list = issue_data.get("assignees_list", [])
            bulk_issue_assignees = bulk_issue_assignees + [
                IssueAssignee(
                    issue=issue,
                    assignee_id=assignee_id,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for assignee_id in assignees_list
            ]

        _ = IssueAssignee.objects.bulk_create(
            bulk_issue_assignees, batch_size=100, ignore_conflicts=True
        )

        # Track the issue activities
        IssueActivity.objects.bulk_create(
            [
                IssueActivity(
                    issue=issue,
                    actor=request.user,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    comment=f"imported the issue from {service}",
                    verb="created",
                    created_by=request.user,
                )
                for issue in issues
            ],
            batch_size=100,
        )

        # Create Comments
        bulk_issue_comments = []
        for issue, issue_data in zip(issues, issues_data):
            comments_list = issue_data.get("comments_list", [])
            bulk_issue_comments = bulk_issue_comments + [
                IssueComment(
                    issue=issue,
                    comment_html=comment.get("comment_html", "<p></p>"),
                    actor=request.user,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for comment in comments_list
            ]

        _ = IssueComment.objects.bulk_create(bulk_issue_comments, batch_size=100)

        # Attach Links
        _ = IssueLink.objects.bulk_create(
            [
                IssueLink(
                    issue=issue,
                    url=issue_data.get("link", {}).get("url", "https://github.com"),
                    title=issue_data.get("link", {}).get("title", "Original Issue"),
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for issue, issue_data in zip(issues, issues_data)
            ]
        )

        return Response(
            {"issues": IssueFlatSerializer(issues, many=True).data},
            status=status.HTTP_201_CREATED,
        )
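For reference, a minimal sketch of one issues_data entry, limited to the keys the loops above actually read; all values are illustrative:

    issue_entry = {
        "name": "Imported issue",
        "description_html": "<p>Body</p>",
        "state": None,            # falsy state falls back to the default state
        "start_date": None,
        "target_date": None,
        "priority": "none",
        "labels_list": [],        # label ids to attach
        "assignees_list": [],     # member ids to assign
        "comments_list": [],      # e.g. [{"comment_html": "<p>Comment</p>"}]
        "link": {"url": "https://github.com", "title": "Original Issue"},
    }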
class BulkImportModulesEndpoint(BaseAPIView):
    def post(self, request, slug, project_id, service):
        modules_data = request.data.get("modules_data", [])
        project = Project.objects.get(pk=project_id, workspace__slug=slug)

        modules = Module.objects.bulk_create(
            [
                Module(
                    name=module.get("name", uuid.uuid4().hex),
                    description=module.get("description", ""),
                    start_date=module.get("start_date", None),
                    target_date=module.get("target_date", None),
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for module in modules_data
            ],
            batch_size=100,
            ignore_conflicts=True,
        )

        modules = Module.objects.filter(id__in=[module.id for module in modules])

        if len(modules) == len(modules_data):
            _ = ModuleLink.objects.bulk_create(
                [
                    ModuleLink(
                        module=module,
                        url=module_data.get("link", {}).get(
                            "url", "https://plane.so"
                        ),
                        title=module_data.get("link", {}).get(
                            "title", "Original Issue"
                        ),
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                        created_by=request.user,
                    )
                    for module, module_data in zip(modules, modules_data)
                ],
                batch_size=100,
                ignore_conflicts=True,
            )

            bulk_module_issues = []
            for module, module_data in zip(modules, modules_data):
                module_issues_list = module_data.get("module_issues_list", [])
                bulk_module_issues = bulk_module_issues + [
                    ModuleIssue(
                        issue_id=issue,
                        module=module,
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                        created_by=request.user,
                    )
                    for issue in module_issues_list
                ]

            _ = ModuleIssue.objects.bulk_create(
                bulk_module_issues, batch_size=100, ignore_conflicts=True
            )

            serializer = ModuleSerializer(modules, many=True)
            return Response(
                {"modules": serializer.data}, status=status.HTTP_201_CREATED
            )

        else:
            return Response(
                {"message": "Modules created but issues could not be imported"},
                status=status.HTTP_200_OK,
            )
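The length comparison above is how the endpoint notices that ignore_conflicts silently dropped rows; a condensed sketch of the pattern:

    created = Module.objects.bulk_create(candidates, batch_size=100, ignore_conflicts=True)
    persisted = Module.objects.filter(id__in=[m.id for m in created])
    if len(persisted) == len(modules_data):
        pass  # safe to zip(persisted, modules_data) and attach links and issues
    else:
        pass  # some modules conflicted, so module issues are skipped with a 200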
@@ -15,7 +15,6 @@ from sentry_sdk import capture_exception
from .base import BaseViewSet
from plane.api.permissions import ProjectBasePermission, ProjectLitePermission
from plane.db.models import (
    Project,
    Inbox,
    InboxIssue,
    Issue,
@@ -23,6 +22,7 @@ from plane.db.models import (
    IssueLink,
    IssueAttachment,
    ProjectMember,
    ProjectDeployBoard,
)
from plane.api.serializers import (
    IssueSerializer,
@@ -64,24 +64,15 @@ class InboxViewSet(BaseViewSet):
        serializer.save(project_id=self.kwargs.get("project_id"))

    def destroy(self, request, slug, project_id, pk):
        inbox = Inbox.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
        # Handle default inbox delete
        if inbox.is_default:
            return Response(
                {"error": "You cannot delete the default inbox"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        inbox.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
@@ -110,271 +101,511 @@ class InboxIssueViewSet(BaseViewSet):
        )

    def list(self, request, slug, project_id, inbox_id):
        filters = issue_filters(request.query_params, "GET")
        issues = (
            Issue.objects.filter(
                issue_inbox__inbox_id=inbox_id,
                workspace__slug=slug,
                project_id=project_id,
            )
            .filter(**filters)
            .annotate(bridge_id=F("issue_inbox__id"))
            .select_related("workspace", "project", "state", "parent")
            .prefetch_related("assignees", "labels")
            .order_by("issue_inbox__snoozed_till", "issue_inbox__status")
            .annotate(
                sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .prefetch_related(
                Prefetch(
                    "issue_inbox",
                    queryset=InboxIssue.objects.only(
                        "status", "duplicate_to", "snoozed_till", "source"
                    ),
                )
            )
        )
        issues_data = IssueStateInboxSerializer(issues, many=True).data
        return Response(
            issues_data,
            status=status.HTTP_200_OK,
        )
    def create(self, request, slug, project_id, inbox_id):
        if not request.data.get("issue", {}).get("name", False):
            return Response(
                {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
            )

        # Check for valid priority
        if not request.data.get("issue", {}).get("priority", "none") in [
            "low",
            "medium",
            "high",
            "urgent",
            "none",
        ]:
            return Response(
                {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
            )

        # Create or get state
        state, _ = State.objects.get_or_create(
            name="Triage",
            group="backlog",
            description="Default state for managing all Inbox Issues",
            project_id=project_id,
            color="#ff7700",
        )

        # create an issue
        issue = Issue.objects.create(
            name=request.data.get("issue", {}).get("name"),
            description=request.data.get("issue", {}).get("description", {}),
            description_html=request.data.get("issue", {}).get(
                "description_html", "<p></p>"
            ),
            priority=request.data.get("issue", {}).get("priority", "low"),
            project_id=project_id,
            state=state,
        )

        # Create an Issue Activity
        issue_activity.delay(
            type="issue.activity.created",
            requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
            actor_id=str(request.user.id),
            issue_id=str(issue.id),
            project_id=str(project_id),
            current_instance=None,
            epoch=int(timezone.now().timestamp()),
        )
        # create an inbox issue
        InboxIssue.objects.create(
            inbox_id=inbox_id,
            project_id=project_id,
            issue=issue,
            source=request.data.get("source", "in-app"),
        )

        serializer = IssueStateInboxSerializer(issue)
        return Response(serializer.data, status=status.HTTP_200_OK)
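A sketch of the nested body the inbox create expects; the keys mirror the request.data reads above, the values are illustrative:

    payload = {
        "source": "in-app",
        "issue": {
            "name": "Bug report from the inbox",
            "description_html": "<p>Steps to reproduce</p>",
            "priority": "none",  # one of low, medium, high, urgent, none
        },
    }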
    def partial_update(self, request, slug, project_id, inbox_id, pk):
        inbox_issue = InboxIssue.objects.get(
            pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
        )
        # Get the project member
        project_member = ProjectMember.objects.get(
            workspace__slug=slug,
            project_id=project_id,
            member=request.user,
            is_active=True,
        )
        # Only project admins/members and the record creator can access this endpoint
        if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(
            request.user.id
        ):
            return Response(
                {"error": "You cannot edit inbox issues"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Get issue data
        issue_data = request.data.pop("issue", False)

        if bool(issue_data):
            issue = Issue.objects.get(
                pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
            )
            # Only allow guests and viewers to edit name and description
            if project_member.role <= 10:
                # viewers and guests may only change the name and description
                issue_data = {
                    "name": issue_data.get("name", issue.name),
                    "description_html": issue_data.get(
                        "description_html", issue.description_html
                    ),
                    "description": issue_data.get("description", issue.description),
                }

            issue_serializer = IssueCreateSerializer(
                issue, data=issue_data, partial=True
            )

            if issue_serializer.is_valid():
                current_instance = issue
                # Log all the updates
                requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
                if issue is not None:
                    issue_activity.delay(
                        type="issue.activity.updated",
                        requested_data=requested_data,
                        actor_id=str(request.user.id),
                        issue_id=str(issue.id),
                        project_id=str(project_id),
                        current_instance=json.dumps(
                            IssueSerializer(current_instance).data,
                            cls=DjangoJSONEncoder,
                        ),
                        epoch=int(timezone.now().timestamp()),
                    )
                issue_serializer.save()
            else:
                return Response(
                    issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST
                )

        # Only project admins and members can edit inbox issue attributes
        if project_member.role > 10:
            serializer = InboxIssueSerializer(
                inbox_issue, data=request.data, partial=True
            )

            if serializer.is_valid():
                serializer.save()
                # Update the issue state if the issue is rejected or marked as duplicate
                if serializer.data["status"] in [-1, 2]:
                    issue = Issue.objects.get(
                        pk=inbox_issue.issue_id,
                        workspace__slug=slug,
                        project_id=project_id,
                    )
                    state = State.objects.filter(
                        group="cancelled", workspace__slug=slug, project_id=project_id
                    ).first()
                    if state is not None:
                        issue.state = state
                        issue.save()

                # Update the issue state if it is accepted
                if serializer.data["status"] in [1]:
                    issue = Issue.objects.get(
                        pk=inbox_issue.issue_id,
                        workspace__slug=slug,
                        project_id=project_id,
                    )

                    # Update the issue state only if it is in triage state
                    if issue.state.name == "Triage":
                        # Move to default state
                        state = State.objects.filter(
                            workspace__slug=slug, project_id=project_id, default=True
                        ).first()
                        if state is not None:
                            issue.state = state
                            issue.save()

                return Response(serializer.data, status=status.HTTP_200_OK)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        else:
            return Response(
                InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK
            )
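
A quick sketch of the role gate used above (assumption: Plane's member roles are 20 Admin, 15 Member, 10 Viewer, 5 Guest, so `role <= 10` captures viewers and guests):

def can_edit_inbox_attributes(role: int) -> bool:
    # Admins and members only; viewers/guests are limited to editing the
    # name/description of inbox issues they created themselves.
    return role > 10
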

    def retrieve(self, request, slug, project_id, inbox_id, pk):
        inbox_issue = InboxIssue.objects.get(
            pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
        )
        issue = Issue.objects.get(
            pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
        )
        serializer = IssueStateInboxSerializer(issue)
        return Response(serializer.data, status=status.HTTP_200_OK)

    def destroy(self, request, slug, project_id, inbox_id, pk):
        inbox_issue = InboxIssue.objects.get(
            pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
        )
        # Get the project member
        project_member = ProjectMember.objects.get(
            workspace__slug=slug,
            project_id=project_id,
            member=request.user,
            is_active=True,
        )

        if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(
            request.user.id
        ):
            return Response(
                {"error": "You cannot delete inbox issue"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Check the issue status
        if inbox_issue.status in [-2, -1, 0, 2]:
            # Delete the issue also
            Issue.objects.filter(
                workspace__slug=slug, project_id=project_id, pk=inbox_issue.issue_id
            ).delete()

        inbox_issue.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
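
The integer statuses consulted by destroy above, for reference (an assumption, matching the status semantics implied throughout this file):

INBOX_ISSUE_STATUS = {
    -2: "Pending",
    -1: "Rejected",
    0: "Snoozed",
    1: "Accepted",
    2: "Duplicate",
}
# destroy() removes the underlying Issue for every status except Accepted (1),
# since an accepted issue has already graduated out of triage.
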


class InboxIssuePublicViewSet(BaseViewSet):
    serializer_class = InboxIssueSerializer
    model = InboxIssue

    filterset_fields = [
        "status",
    ]

    def get_queryset(self):
        project_deploy_board = ProjectDeployBoard.objects.get(
            workspace__slug=self.kwargs.get("slug"),
            project_id=self.kwargs.get("project_id"),
        )
        if project_deploy_board is not None:
            return self.filter_queryset(
                super()
                .get_queryset()
                .filter(
                    Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
                    project_id=self.kwargs.get("project_id"),
                    workspace__slug=self.kwargs.get("slug"),
                    inbox_id=self.kwargs.get("inbox_id"),
                )
                .select_related("issue", "workspace", "project")
            )
        return InboxIssue.objects.none()

    def list(self, request, slug, project_id, inbox_id):
        project_deploy_board = ProjectDeployBoard.objects.get(
            workspace__slug=slug, project_id=project_id
        )
        if project_deploy_board.inbox is None:
            return Response(
                {"error": "Inbox is not enabled for this Project Board"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        filters = issue_filters(request.query_params, "GET")
        issues = (
            Issue.objects.filter(
                issue_inbox__inbox_id=inbox_id,
                workspace__slug=slug,
                project_id=project_id,
            )
            .filter(**filters)
            .annotate(bridge_id=F("issue_inbox__id"))
            .select_related("workspace", "project", "state", "parent")
            .prefetch_related("assignees", "labels")
            .order_by("issue_inbox__snoozed_till", "issue_inbox__status")
            .annotate(
                sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .prefetch_related(
                Prefetch(
                    "issue_inbox",
                    queryset=InboxIssue.objects.only(
                        "status", "duplicate_to", "snoozed_till", "source"
                    ),
                )
            )
        )
        issues_data = IssueStateInboxSerializer(issues, many=True).data
        return Response(
            issues_data,
            status=status.HTTP_200_OK,
        )

    def create(self, request, slug, project_id, inbox_id):
        project_deploy_board = ProjectDeployBoard.objects.get(
            workspace__slug=slug, project_id=project_id
        )
        if project_deploy_board.inbox is None:
            return Response(
                {"error": "Inbox is not enabled for this Project Board"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        if not request.data.get("issue", {}).get("name", False):
            return Response(
                {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
            )

        # Check for valid priority
        if not request.data.get("issue", {}).get("priority", "none") in [
            "low",
            "medium",
            "high",
            "urgent",
            "none",
        ]:
            return Response(
                {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
            )

        # Create or get state
        state, _ = State.objects.get_or_create(
            name="Triage",
            group="backlog",
            description="Default state for managing all Inbox Issues",
            project_id=project_id,
            color="#ff7700",
        )

        # create an issue
        issue = Issue.objects.create(
            name=request.data.get("issue", {}).get("name"),
            description=request.data.get("issue", {}).get("description", {}),
            description_html=request.data.get("issue", {}).get(
                "description_html", "<p></p>"
            ),
            priority=request.data.get("issue", {}).get("priority", "low"),
            project_id=project_id,
            state=state,
        )

        # Create an Issue Activity
        issue_activity.delay(
            type="issue.activity.created",
            requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
            actor_id=str(request.user.id),
            issue_id=str(issue.id),
            project_id=str(project_id),
            current_instance=None,
            epoch=int(timezone.now().timestamp()),
        )
        # create an inbox issue
        InboxIssue.objects.create(
            inbox_id=inbox_id,
            project_id=project_id,
            issue=issue,
            source=request.data.get("source", "in-app"),
        )

        serializer = IssueStateInboxSerializer(issue)
        return Response(serializer.data, status=status.HTTP_200_OK)

    def partial_update(self, request, slug, project_id, inbox_id, pk):
        project_deploy_board = ProjectDeployBoard.objects.get(
            workspace__slug=slug, project_id=project_id
        )
        if project_deploy_board.inbox is None:
            return Response(
                {"error": "Inbox is not enabled for this Project Board"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        inbox_issue = InboxIssue.objects.get(
            pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
        )
        # Only the record creator can edit the issue
        if str(inbox_issue.created_by_id) != str(request.user.id):
            return Response(
                {"error": "You cannot edit inbox issues"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Get issue data
        issue_data = request.data.pop("issue", False)

        issue = Issue.objects.get(
            pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
        )
        # public users may only change the name and description
        issue_data = {
            "name": issue_data.get("name", issue.name),
            "description_html": issue_data.get(
                "description_html", issue.description_html
            ),
            "description": issue_data.get("description", issue.description),
        }

        issue_serializer = IssueCreateSerializer(issue, data=issue_data, partial=True)

        if issue_serializer.is_valid():
            current_instance = issue
            # Log all the updates
            requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
            if issue is not None:
                issue_activity.delay(
                    type="issue.activity.updated",
                    requested_data=requested_data,
                    actor_id=str(request.user.id),
                    issue_id=str(issue.id),
                    project_id=str(project_id),
                    current_instance=json.dumps(
                        IssueSerializer(current_instance).data,
                        cls=DjangoJSONEncoder,
                    ),
                    epoch=int(timezone.now().timestamp()),
                )
            issue_serializer.save()
            return Response(issue_serializer.data, status=status.HTTP_200_OK)
        return Response(issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def retrieve(self, request, slug, project_id, inbox_id, pk):
        project_deploy_board = ProjectDeployBoard.objects.get(
            workspace__slug=slug, project_id=project_id
        )
        if project_deploy_board.inbox is None:
            return Response(
                {"error": "Inbox is not enabled for this Project Board"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        inbox_issue = InboxIssue.objects.get(
            pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
        )
        issue = Issue.objects.get(
            pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
        )
        serializer = IssueStateInboxSerializer(issue)
        return Response(serializer.data, status=status.HTTP_200_OK)

    def destroy(self, request, slug, project_id, inbox_id, pk):
        project_deploy_board = ProjectDeployBoard.objects.get(
            workspace__slug=slug, project_id=project_id
        )
        if project_deploy_board.inbox is None:
            return Response(
                {"error": "Inbox is not enabled for this Project Board"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        inbox_issue = InboxIssue.objects.get(
            pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
        )

        if str(inbox_issue.created_by_id) != str(request.user.id):
            return Response(
                {"error": "You cannot delete inbox issue"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        inbox_issue.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
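
One subtlety worth noting in get_queryset above — the snooze condition keeps records whose snooze window is still open or was never set; a sketch of the equivalent SQL shape:

# Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True)
# ~= WHERE (snoozed_till >= NOW() OR snoozed_till IS NULL)
# so rows whose snooze deadline has already passed drop out of the listing.
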

@@ -1,8 +1,7 @@
# Python imports
import uuid

# Django imports
from django.db import IntegrityError
from django.contrib.auth.hashers import make_password

# Third party imports
@@ -26,73 +25,47 @@ from plane.utils.integrations.github import (
    delete_github_installation,
)
from plane.api.permissions import WorkSpaceAdminPermission

from plane.utils.integrations.slack import slack_oauth


class IntegrationViewSet(BaseViewSet):
    serializer_class = IntegrationSerializer
    model = Integration

    def create(self, request):
        serializer = IntegrationSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def partial_update(self, request, pk):
        integration = Integration.objects.get(pk=pk)
        if integration.verified:
            return Response(
                {"error": "Verified integrations cannot be updated"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        serializer = IntegrationSerializer(
            integration, data=request.data, partial=True
        )

        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def destroy(self, request, pk):
        integration = Integration.objects.get(pk=pk)
        if integration.verified:
            return Response(
                {"error": "Verified integrations cannot be deleted"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        integration.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
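
The verified flag gates both mutation paths above; a sketch of the intended invariant:

# Once an Integration row is marked verified=True, its definition is frozen:
# partial_update() and destroy() both short-circuit with HTTP 400.
integration = Integration.objects.get(pk=pk)
assert not integration.verified, "verified integrations are immutable"
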


class WorkspaceIntegrationViewSet(BaseViewSet):
    serializer_class = WorkspaceIntegrationSerializer
@@ -111,119 +84,88 @@ class WorkspaceIntegrationViewSet(BaseViewSet):
    )

    def create(self, request, slug, provider):
        workspace = Workspace.objects.get(slug=slug)
        integration = Integration.objects.get(provider=provider)
        config = {}
        if provider == "github":
            installation_id = request.data.get("installation_id", None)
            if not installation_id:
                return Response(
                    {"error": "Installation ID is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            metadata = get_github_metadata(installation_id)
            config = {"installation_id": installation_id}

        if provider == "slack":
            code = request.data.get("code", False)

            if not code:
                return Response(
                    {"error": "Code is required"}, status=status.HTTP_400_BAD_REQUEST
                )

            slack_response = slack_oauth(code=code)

            metadata = slack_response
            access_token = metadata.get("access_token", False)
            team_id = metadata.get("team", {}).get("id", False)
            if not metadata or not access_token or not team_id:
                return Response(
                    {"error": "Slack could not be installed. Please try again later"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            config = {"team_id": team_id, "access_token": access_token}

        # Create a bot user
        bot_user = User.objects.create(
            email=f"{uuid.uuid4().hex}@plane.so",
            username=uuid.uuid4().hex,
            password=make_password(uuid.uuid4().hex),
            is_password_autoset=True,
            is_bot=True,
            first_name=integration.title,
            avatar=integration.avatar_url
            if integration.avatar_url is not None
            else "",
        )

        # Create an API Token for the bot user
        api_token = APIToken.objects.create(
            user=bot_user,
            user_type=1,  # bot user
            workspace=workspace,
        )

        workspace_integration = WorkspaceIntegration.objects.create(
            workspace=workspace,
            integration=integration,
            actor=bot_user,
            api_token=api_token,
            metadata=metadata,
            config=config,
        )

        # Add bot user as a member of workspace
        _ = WorkspaceMember.objects.create(
            workspace=workspace_integration.workspace,
            member=bot_user,
            role=20,
        )
        return Response(
            WorkspaceIntegrationSerializer(workspace_integration).data,
            status=status.HTTP_201_CREATED,
        )

    def destroy(self, request, slug, pk):
        workspace_integration = WorkspaceIntegration.objects.get(
            pk=pk, workspace__slug=slug
        )

        if workspace_integration.integration.provider == "github":
            installation_id = workspace_integration.config.get(
                "installation_id", False
            )
            if installation_id:
                delete_github_installation(installation_id=installation_id)

        workspace_integration.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)

@@ -30,31 +30,25 @@ class GithubRepositoriesEndpoint(BaseAPIView):
    ]

    def get(self, request, slug, workspace_integration_id):
        page = request.GET.get("page", 1)
        workspace_integration = WorkspaceIntegration.objects.get(
            workspace__slug=slug, pk=workspace_integration_id
        )

        if workspace_integration.integration.provider != "github":
            return Response(
                {"error": "Not a github integration"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        access_tokens_url = workspace_integration.metadata["access_tokens_url"]
        repositories_url = (
            workspace_integration.metadata["repositories_url"]
            + f"?per_page=100&page={page}"
        )
        repositories = get_github_repos(access_tokens_url, repositories_url)
        return Response(repositories, status=status.HTTP_200_OK)
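
The URL assembled above pages through GitHub's installation-repositories listing 100 rows at a time; a sketch of the resulting request (hypothetical metadata values):

# repositories_url, e.g.:
#   https://api.github.com/installation/repositories?per_page=100&page=2
# get_github_repos() is expected to authenticate via access_tokens_url and
# fetch that page; callers pass ?page=N to walk the full listing.
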


class GithubRepositorySyncViewSet(BaseViewSet):
    permission_classes = [
@@ -76,89 +70,76 @@ class GithubRepositorySyncViewSet(BaseViewSet):
    )

    def create(self, request, slug, project_id, workspace_integration_id):
        name = request.data.get("name", False)
        url = request.data.get("url", False)
        config = request.data.get("config", {})
        repository_id = request.data.get("repository_id", False)
        owner = request.data.get("owner", False)

        if not name or not url or not repository_id or not owner:
            return Response(
                {"error": "Name, url, repository_id and owner are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Get the workspace integration
        workspace_integration = WorkspaceIntegration.objects.get(
            pk=workspace_integration_id
        )

        # Delete the old repository object
        GithubRepositorySync.objects.filter(
            project_id=project_id, workspace__slug=slug
        ).delete()
        GithubRepository.objects.filter(
            project_id=project_id, workspace__slug=slug
        ).delete()

        # Create repository
        repo = GithubRepository.objects.create(
            name=name,
            url=url,
            config=config,
            repository_id=repository_id,
            owner=owner,
            project_id=project_id,
        )

        # Create a Label for github
        label = Label.objects.filter(
            name="GitHub",
            project_id=project_id,
        ).first()

        if label is None:
            label = Label.objects.create(
                name="GitHub",
                project_id=project_id,
                description="Label to sync Plane issues with GitHub issues",
                color="#003773",
            )

        # Create repo sync
        repo_sync = GithubRepositorySync.objects.create(
            repository=repo,
            workspace_integration=workspace_integration,
            actor=workspace_integration.actor,
            credentials=request.data.get("credentials", {}),
            project_id=project_id,
            label=label,
        )

        # Add bot as a member in the project
        _ = ProjectMember.objects.get_or_create(
            member=workspace_integration.actor, role=20, project_id=project_id
        )

        # Return Response
        return Response(
            GithubRepositorySyncSerializer(repo_sync).data,
            status=status.HTTP_201_CREATED,
        )
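
A short note on the final get_or_create above: re-running the sync will not enroll the bot twice, because an existing row matching the lookup is returned instead of inserted:

member, created = ProjectMember.objects.get_or_create(
    member=workspace_integration.actor, role=20, project_id=project_id
)
# created is False on repeat syncs; no duplicate membership rows are written.
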


class GithubIssueSyncViewSet(BaseViewSet):
    permission_classes = [
@@ -177,42 +158,30 @@ class GithubIssueSyncViewSet(BaseViewSet):

class BulkCreateGithubIssueSyncEndpoint(BaseAPIView):
    def post(self, request, slug, project_id, repo_sync_id):
        project = Project.objects.get(pk=project_id, workspace__slug=slug)

        github_issue_syncs = request.data.get("github_issue_syncs", [])
        github_issue_syncs = GithubIssueSync.objects.bulk_create(
            [
                GithubIssueSync(
                    issue_id=github_issue_sync.get("issue"),
                    repo_issue_id=github_issue_sync.get("repo_issue_id"),
                    issue_url=github_issue_sync.get("issue_url"),
                    github_issue_id=github_issue_sync.get("github_issue_id"),
                    repository_sync_id=repo_sync_id,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                    updated_by=request.user,
                )
                for github_issue_sync in github_issue_syncs
            ],
            batch_size=100,
            ignore_conflicts=True,
        )

        serializer = GithubIssueSyncSerializer(github_issue_syncs, many=True)
        return Response(serializer.data, status=status.HTTP_201_CREATED)
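
A note on the bulk_create flags used above (standard Django semantics):

# batch_size=100 chunks the INSERTs into 100 rows per statement.
# ignore_conflicts=True silently skips rows that hit a unique constraint
# instead of raising IntegrityError -- but skipped rows are not reported,
# and on most backends the returned objects lack database-assigned PKs.
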


class GithubCommentSyncViewSet(BaseViewSet):

@@ -11,6 +11,7 @@ from plane.api.views import BaseViewSet, BaseAPIView
from plane.db.models import SlackProjectSync, WorkspaceIntegration, ProjectMember
from plane.api.serializers import SlackProjectSyncSerializer
from plane.api.permissions import ProjectBasePermission, ProjectEntityPermission
from plane.utils.integrations.slack import slack_oauth


class SlackProjectSyncViewSet(BaseViewSet):
@@ -20,40 +21,59 @@ class SlackProjectSyncViewSet(BaseViewSet):
    serializer_class = SlackProjectSyncSerializer
    model = SlackProjectSync

    def get_queryset(self):
        return (
            super()
            .get_queryset()
            .filter(
                workspace__slug=self.kwargs.get("slug"),
                project_id=self.kwargs.get("project_id"),
            )
            .filter(project__project_projectmember__member=self.request.user)
        )

    def create(self, request, slug, project_id, workspace_integration_id):
        try:
            code = request.data.get("code", False)

            if not code:
                return Response(
                    {"error": "Code is required"}, status=status.HTTP_400_BAD_REQUEST
                )

            slack_response = slack_oauth(code=code)

            workspace_integration = WorkspaceIntegration.objects.get(
                workspace__slug=slug, pk=workspace_integration_id
            )
            slack_project_sync = SlackProjectSync.objects.create(
                access_token=slack_response.get("access_token"),
                scopes=slack_response.get("scope"),
                bot_user_id=slack_response.get("bot_user_id"),
                webhook_url=slack_response.get("incoming_webhook", {}).get("url"),
                data=slack_response,
                team_id=slack_response.get("team", {}).get("id"),
                team_name=slack_response.get("team", {}).get("name"),
                workspace_integration=workspace_integration,
                project_id=project_id,
            )
            _ = ProjectMember.objects.get_or_create(
                member=workspace_integration.actor, role=20, project_id=project_id
            )
            serializer = SlackProjectSyncSerializer(slack_project_sync)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except IntegrityError:
            return Response(
                {"error": "Slack is already enabled for the project"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except WorkspaceIntegration.DoesNotExist:
            return Response(
                {"error": "Workspace Integration does not exist"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Slack could not be installed. Please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )
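
slack_oauth is imported from plane.utils.integrations.slack and its body is not part of this diff; a minimal sketch of what such a helper typically does (hypothetical names and environment variables, not Plane's actual implementation):

import os
import requests

def slack_oauth(code):
    # Exchange the OAuth authorization code for tokens via Slack's
    # oauth.v2.access endpoint and hand back the decoded JSON payload.
    response = requests.post(
        "https://slack.com/api/oauth.v2.access",
        data={
            "client_id": os.environ.get("SLACK_CLIENT_ID"),
            "client_secret": os.environ.get("SLACK_CLIENT_SECRET"),
            "code": code,
        },
    )
    return response.json()
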

File diff suppressed because it is too large

@@ -2,6 +2,7 @@
import json

# Django Imports
from django.utils import timezone
from django.db import IntegrityError
from django.db.models import Prefetch, F, OuterRef, Func, Exists, Count, Q
from django.core import serializers
@@ -14,7 +15,7 @@ from rest_framework import status
from sentry_sdk import capture_exception

# Module imports
from . import BaseViewSet, WebhookMixin
from plane.api.serializers import (
    ModuleWriteSerializer,
    ModuleSerializer,
@@ -39,11 +40,13 @@ from plane.utils.grouper import group_results
from plane.utils.issue_filters import issue_filters
from plane.utils.analytics_plot import burndown_plot


class ModuleViewSet(WebhookMixin, BaseViewSet):
    model = Module
    permission_classes = [
        ProjectEntityPermission,
    ]
    webhook_event = "module"

    def get_serializer_class(self):
        return (
@@ -53,6 +56,7 @@ class ModuleViewSet(BaseViewSet):
        )

    def get_queryset(self):

        subquery = ModuleFavorite.objects.filter(
            user=self.request.user,
            module_id=OuterRef("pk"),
@@ -75,171 +79,210 @@ class ModuleViewSet(BaseViewSet):
                    queryset=ModuleLink.objects.select_related("module", "created_by"),
                )
            )
            .annotate(
                total_issues=Count(
                    "issue_module",
                    filter=Q(
                        issue_module__issue__archived_at__isnull=True,
                        issue_module__issue__is_draft=False,
                    ),
                ),
            )
            .annotate(
                completed_issues=Count(
                    "issue_module__issue__state__group",
                    filter=Q(
                        issue_module__issue__state__group="completed",
                        issue_module__issue__archived_at__isnull=True,
                        issue_module__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                cancelled_issues=Count(
                    "issue_module__issue__state__group",
                    filter=Q(
                        issue_module__issue__state__group="cancelled",
                        issue_module__issue__archived_at__isnull=True,
                        issue_module__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                started_issues=Count(
                    "issue_module__issue__state__group",
                    filter=Q(
                        issue_module__issue__state__group="started",
                        issue_module__issue__archived_at__isnull=True,
                        issue_module__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                unstarted_issues=Count(
                    "issue_module__issue__state__group",
                    filter=Q(
                        issue_module__issue__state__group="unstarted",
                        issue_module__issue__archived_at__isnull=True,
                        issue_module__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                backlog_issues=Count(
                    "issue_module__issue__state__group",
                    filter=Q(
                        issue_module__issue__state__group="backlog",
                        issue_module__issue__archived_at__isnull=True,
                        issue_module__issue__is_draft=False,
                    ),
                )
            )
            .order_by("-is_favorite", "-created_at")
        )

    def create(self, request, slug, project_id):
        project = Project.objects.get(workspace__slug=slug, pk=project_id)
        serializer = ModuleWriteSerializer(
            data=request.data, context={"project": project}
        )

        if serializer.is_valid():
            serializer.save()

            module = Module.objects.get(pk=serializer.data["id"])
            serializer = ModuleSerializer(module)
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def retrieve(self, request, slug, project_id, pk):
        queryset = self.get_queryset().get(pk=pk)

        assignee_distribution = (
            Issue.objects.filter(
                issue_module__module_id=pk,
                workspace__slug=slug,
                project_id=project_id,
            )
            .annotate(first_name=F("assignees__first_name"))
            .annotate(last_name=F("assignees__last_name"))
            .annotate(assignee_id=F("assignees__id"))
            .annotate(display_name=F("assignees__display_name"))
            .annotate(avatar=F("assignees__avatar"))
            .values("first_name", "last_name", "assignee_id", "avatar", "display_name")
            .annotate(
                total_issues=Count(
                    "assignee_id",
                    filter=Q(
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                )
            )
            .annotate(
                completed_issues=Count(
                    "assignee_id",
                    filter=Q(
                        completed_at__isnull=False,
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                )
            )
            .annotate(
                pending_issues=Count(
                    "assignee_id",
                    filter=Q(
                        completed_at__isnull=True,
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                )
            )
            .order_by("first_name", "last_name")
        )

        label_distribution = (
            Issue.objects.filter(
                issue_module__module_id=pk,
                workspace__slug=slug,
                project_id=project_id,
            )
            .annotate(label_name=F("labels__name"))
            .annotate(color=F("labels__color"))
            .annotate(label_id=F("labels__id"))
            .values("label_name", "color", "label_id")
            .annotate(
                total_issues=Count(
                    "label_id",
                    filter=Q(
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                ),
            )
            .annotate(
                completed_issues=Count(
                    "label_id",
                    filter=Q(
                        completed_at__isnull=False,
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                )
            )
            .annotate(
                pending_issues=Count(
                    "label_id",
                    filter=Q(
                        completed_at__isnull=True,
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                )
            )
            .order_by("label_name")
        )

        data = ModuleSerializer(queryset).data
        data["distribution"] = {
            "assignees": assignee_distribution,
            "labels": label_distribution,
            "completion_chart": {},
        }

        if queryset.start_date and queryset.target_date:
            data["distribution"]["completion_chart"] = burndown_plot(
                queryset=queryset, slug=slug, project_id=project_id, module_id=pk
            )

        return Response(
            data,
            status=status.HTTP_200_OK,
        )

    def destroy(self, request, slug, project_id, pk):
        module = Module.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
        module_issues = list(
            ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True)
        )
        issue_activity.delay(
            type="module.activity.deleted",
            requested_data=json.dumps(
                {
                    "module_id": str(pk),
                    "module_name": str(module.name),
                    "issues": [str(issue_id) for issue_id in module_issues],
                }
            ),
            actor_id=str(request.user.id),
            issue_id=str(pk),
            project_id=str(project_id),
            current_instance=None,
            epoch=int(timezone.now().timestamp()),
        )
        module.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
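
The annotations above lean on Django's filtered aggregation, which compiles to conditional COUNT (COUNT(...) FILTER (WHERE ...) on PostgreSQL), so a single query yields several per-module tallies; a minimal standalone sketch of the pattern:

from django.db.models import Count, Q

Module.objects.annotate(
    completed_issues=Count(
        "issue_module__issue__state__group",
        filter=Q(issue_module__issue__state__group="completed"),
    )
)
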


class ModuleIssueViewSet(BaseViewSet):
@@ -255,28 +298,6 @@ class ModuleIssueViewSet(BaseViewSet):
        ProjectEntityPermission,
    ]

    def get_queryset(self):
        return self.filter_queryset(
            super()
@@ -302,154 +323,162 @@ class ModuleIssueViewSet(BaseViewSet):

    @method_decorator(gzip_page)
    def list(self, request, slug, project_id, module_id):
        order_by = request.GET.get("order_by", "created_at")
        group_by = request.GET.get("group_by", False)
        sub_group_by = request.GET.get("sub_group_by", False)
        filters = issue_filters(request.query_params, "GET")
        issues = (
            Issue.issue_objects.filter(issue_module__module_id=module_id)
            .annotate(
                sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(bridge_id=F("issue_module__id"))
            .filter(project_id=project_id)
            .filter(workspace__slug=slug)
            .select_related("project")
            .select_related("workspace")
            .select_related("state")
            .select_related("parent")
            .prefetch_related("assignees")
            .prefetch_related("labels")
            .order_by(order_by)
            .filter(**filters)
            .annotate(
                link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
        )
        issues_data = IssueStateSerializer(issues, many=True).data

        if sub_group_by and sub_group_by == group_by:
            return Response(
                {"error": "Group by and sub group by cannot be same"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        if group_by:
            grouped_results = group_results(issues_data, group_by, sub_group_by)
            return Response(
                grouped_results,
                status=status.HTTP_200_OK,
            )

        return Response(
            issues_data, status=status.HTTP_200_OK
        )
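# Illustrative sketch only, not part of the diff: group_results is imported
# from plane's utils and is not shown in this compare. Assuming it buckets the
# serialized issues by a key, with one extra nesting level when sub_group_by
# is supplied, its behaviour can be approximated as:
from collections import defaultdict

def group_results_sketch(results, group_by, sub_group_by=None):
    grouped = defaultdict(list)
    for row in results:
        grouped[str(row.get(group_by))].append(row)
    if not sub_group_by:
        return dict(grouped)
    # Recurse once to produce {group: {sub_group: [rows]}}.
    return {
        key: group_results_sketch(rows, sub_group_by)
        for key, rows in grouped.items()
    }

issues = [{"state": "todo", "priority": "high"}, {"state": "todo", "priority": "low"}]
print(group_results_sketch(issues, "state", "priority"))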
    def create(self, request, slug, project_id, module_id):
        issues = request.data.get("issues", [])
        if not len(issues):
            return Response(
                {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST
            )
        module = Module.objects.get(
            workspace__slug=slug, project_id=project_id, pk=module_id
        )

        module_issues = list(ModuleIssue.objects.filter(issue_id__in=issues))

        update_module_issue_activity = []
        records_to_update = []
        record_to_create = []

        for issue in issues:
            module_issue = [
                module_issue
                for module_issue in module_issues
                if str(module_issue.issue_id) in issues
            ]

            if len(module_issue):
                if module_issue[0].module_id != module_id:
                    update_module_issue_activity.append(
                        {
                            "old_module_id": str(module_issue[0].module_id),
                            "new_module_id": str(module_id),
                            "issue_id": str(module_issue[0].issue_id),
                        }
                    )
                    module_issue[0].module_id = module_id
                    records_to_update.append(module_issue[0])
            else:
                record_to_create.append(
                    ModuleIssue(
                        module=module,
                        issue_id=issue,
                        project_id=project_id,
                        workspace=module.workspace,
                        created_by=request.user,
                        updated_by=request.user,
                    )
                )

        ModuleIssue.objects.bulk_create(
            record_to_create,
            batch_size=10,
            ignore_conflicts=True,
        )

        ModuleIssue.objects.bulk_update(
            records_to_update,
            ["module"],
            batch_size=10,
        )

        # Capture Issue Activity
        issue_activity.delay(
            type="module.activity.created",
            requested_data=json.dumps({"modules_list": issues}),
            actor_id=str(self.request.user.id),
            issue_id=None,
            project_id=str(self.kwargs.get("project_id", None)),
            current_instance=json.dumps(
                {
                    "updated_module_issues": update_module_issue_activity,
                    "created_module_issues": serializers.serialize(
                        "json", record_to_create
                    ),
                }
            ),
            epoch=int(timezone.now().timestamp()),
        )

        return Response(
            ModuleIssueSerializer(self.get_queryset(), many=True).data,
            status=status.HTTP_200_OK,
        )
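# Illustrative sketch only, not part of the diff: create() above splits the
# incoming issue ids into links to update (already attached to some module)
# and links to create. A simplified, Django-free sketch of that partition,
# assuming each link is matched to its own issue id:
def partition_module_issues(issue_ids, existing_links, new_module_id):
    to_update, to_create = [], []
    by_issue = {link["issue_id"]: link for link in existing_links}
    for issue_id in issue_ids:
        link = by_issue.get(issue_id)
        if link is None:
            to_create.append(issue_id)
        elif link["module_id"] != new_module_id:
            link["module_id"] = new_module_id
            to_update.append(link)
    return to_update, to_create

links = [{"issue_id": "a", "module_id": "m1"}]
print(partition_module_issues(["a", "b"], links, "m2"))
# ([{'issue_id': 'a', 'module_id': 'm2'}], ['b'])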
    def destroy(self, request, slug, project_id, module_id, pk):
        module_issue = ModuleIssue.objects.get(
            workspace__slug=slug, project_id=project_id, module_id=module_id, pk=pk
        )
        module_issue.delete()
        issue_activity.delay(
            type="module.activity.deleted",
            requested_data=json.dumps(
                {
                    "module_id": str(module_id),
                    "issues": [str(module_issue.issue_id)],
                }
            ),
            actor_id=str(request.user.id),
            issue_id=str(pk),
            project_id=str(project_id),
            current_instance=None,
            epoch=int(timezone.now().timestamp()),
        )
        return Response(status=status.HTTP_204_NO_CONTENT)

class ModuleLinkViewSet(BaseViewSet):

@@ -480,10 +509,6 @@ class ModuleLinkViewSet(BaseViewSet):


class ModuleFavoriteViewSet(BaseViewSet):
    permission_classes = [
        ProjectEntityPermission,
    ]

    serializer_class = ModuleFavoriteSerializer
    model = ModuleFavorite

@@ -497,49 +522,18 @@ class ModuleFavoriteViewSet(BaseViewSet):
        )

    def create(self, request, slug, project_id):
        serializer = ModuleFavoriteSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save(user=request.user, project_id=project_id)
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def destroy(self, request, slug, project_id, module_id):
        module_favorite = ModuleFavorite.objects.get(
            project=project_id,
            user=request.user,
            workspace__slug=slug,
            module_id=module_id,
        )
        module_favorite.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)

@@ -6,14 +6,21 @@ from django.utils import timezone
from rest_framework import status
from rest_framework.response import Response
from sentry_sdk import capture_exception
from plane.utils.paginator import BasePaginator

# Module imports
from .base import BaseViewSet, BaseAPIView
from plane.db.models import (
    Notification,
    IssueAssignee,
    IssueSubscriber,
    Issue,
    WorkspaceMember,
)
from plane.api.serializers import NotificationSerializer


class NotificationViewSet(BaseViewSet, BasePaginator):
    model = Notification
    serializer_class = NotificationSerializer

@@ -25,181 +32,249 @@ class NotificationViewSet(BaseViewSet):
                workspace__slug=self.kwargs.get("slug"),
                receiver_id=self.request.user.id,
            )
            .select_related("workspace", "project", "triggered_by", "receiver")
        )
    def list(self, request, slug):
        # Get query parameters
        snoozed = request.GET.get("snoozed", "false")
        archived = request.GET.get("archived", "false")
        read = request.GET.get("read", "true")

        # Filter type
        type = request.GET.get("type", "all")

        notifications = (
            Notification.objects.filter(
                workspace__slug=slug, receiver_id=request.user.id
            )
            .select_related("workspace", "project", "triggered_by", "receiver")
            .order_by("snoozed_till", "-created_at")
        )

        # Filters based on query parameters
        snoozed_filters = {
            "true": Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False),
            "false": Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
        }

        notifications = notifications.filter(snoozed_filters[snoozed])

        archived_filters = {
            "true": Q(archived_at__isnull=False),
            "false": Q(archived_at__isnull=True),
        }

        notifications = notifications.filter(archived_filters[archived])

        if read == "false":
            notifications = notifications.filter(read_at__isnull=True)

        # Subscribed issues
        if type == "watching":
            issue_ids = IssueSubscriber.objects.filter(
                workspace__slug=slug, subscriber_id=request.user.id
            ).values_list("issue_id", flat=True)
            notifications = notifications.filter(entity_identifier__in=issue_ids)

        # Assigned Issues
        if type == "assigned":
            issue_ids = IssueAssignee.objects.filter(
                workspace__slug=slug, assignee_id=request.user.id
            ).values_list("issue_id", flat=True)
            notifications = notifications.filter(entity_identifier__in=issue_ids)

        # Created issues
        if type == "created":
            if WorkspaceMember.objects.filter(
                workspace__slug=slug,
                member=request.user,
                role__lt=15,
                is_active=True,
            ).exists():
                notifications = Notification.objects.none()
            else:
                issue_ids = Issue.objects.filter(
                    workspace__slug=slug, created_by=request.user
                ).values_list("pk", flat=True)
                notifications = notifications.filter(entity_identifier__in=issue_ids)

        # Pagination
        if request.GET.get("per_page", False) and request.GET.get("cursor", False):
            return self.paginate(
                request=request,
                queryset=(notifications),
                on_results=lambda notifications: NotificationSerializer(
                    notifications, many=True
                ).data,
            )

        serializer = NotificationSerializer(notifications, many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)
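# Illustrative sketch only, not part of the diff: mapping the "snoozed" and
# "archived" query params to prepared Q objects replaces an if/else ladder.
# Constructing Q nodes only requires Django to be installed, no configured
# project, so this runs standalone:
from datetime import datetime
from django.db.models import Q

now = datetime.now()
snoozed_filters = {
    "true": Q(snoozed_till__lt=now) | Q(snoozed_till__isnull=False),
    "false": Q(snoozed_till__gte=now) | Q(snoozed_till__isnull=True),
}
# queryset.filter(snoozed_filters[snoozed]) then picks the right predicate;
# an unexpected param value raises KeyError, so callers should validate first.
print(snoozed_filters["false"])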
    def partial_update(self, request, slug, pk):
        notification = Notification.objects.get(
            workspace__slug=slug, pk=pk, receiver=request.user
        )
        # Only read_at and snoozed_till can be updated
        notification_data = {
            "snoozed_till": request.data.get("snoozed_till", None),
        }
        serializer = NotificationSerializer(
            notification, data=notification_data, partial=True
        )

        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def mark_read(self, request, slug, pk):
        notification = Notification.objects.get(
            receiver=request.user, workspace__slug=slug, pk=pk
        )
        notification.read_at = timezone.now()
        notification.save()
        serializer = NotificationSerializer(notification)
        return Response(serializer.data, status=status.HTTP_200_OK)
    def mark_unread(self, request, slug, pk):
        notification = Notification.objects.get(
            receiver=request.user, workspace__slug=slug, pk=pk
        )
        notification.read_at = None
        notification.save()
        serializer = NotificationSerializer(notification)
        return Response(serializer.data, status=status.HTTP_200_OK)
    def archive(self, request, slug, pk):
        notification = Notification.objects.get(
            receiver=request.user, workspace__slug=slug, pk=pk
        )
        notification.archived_at = timezone.now()
        notification.save()
        serializer = NotificationSerializer(notification)
        return Response(serializer.data, status=status.HTTP_200_OK)
    def unarchive(self, request, slug, pk):
        notification = Notification.objects.get(
            receiver=request.user, workspace__slug=slug, pk=pk
        )
        notification.archived_at = None
        notification.save()
        serializer = NotificationSerializer(notification)
        return Response(serializer.data, status=status.HTTP_200_OK)


class UnreadNotificationEndpoint(BaseAPIView):
    def get(self, request, slug):
        # Watching Issues Count
        watching_issues_count = Notification.objects.filter(
            workspace__slug=slug,
            receiver_id=request.user.id,
            read_at__isnull=True,
            archived_at__isnull=True,
            entity_identifier__in=IssueSubscriber.objects.filter(
                workspace__slug=slug, subscriber_id=request.user.id
            ).values_list("issue_id", flat=True),
        ).count()

        # My Issues Count
        my_issues_count = Notification.objects.filter(
            workspace__slug=slug,
            receiver_id=request.user.id,
            read_at__isnull=True,
            archived_at__isnull=True,
            entity_identifier__in=IssueAssignee.objects.filter(
                workspace__slug=slug, assignee_id=request.user.id
            ).values_list("issue_id", flat=True),
        ).count()

        # Created Issues Count
        created_issues_count = Notification.objects.filter(
            workspace__slug=slug,
            receiver_id=request.user.id,
            read_at__isnull=True,
            archived_at__isnull=True,
            entity_identifier__in=Issue.objects.filter(
                workspace__slug=slug, created_by=request.user
            ).values_list("pk", flat=True),
        ).count()

        return Response(
            {
                "watching_issues": watching_issues_count,
                "my_issues": my_issues_count,
                "created_issues": created_issues_count,
            },
            status=status.HTTP_200_OK,
        )


class MarkAllReadNotificationViewSet(BaseViewSet):
    def create(self, request, slug):
        snoozed = request.data.get("snoozed", False)
        archived = request.data.get("archived", False)
        type = request.data.get("type", "all")

        notifications = (
            Notification.objects.filter(
                workspace__slug=slug,
                receiver_id=request.user.id,
                read_at__isnull=True,
            )
            .select_related("workspace", "project", "triggered_by", "receiver")
            .order_by("snoozed_till", "-created_at")
        )

        # Filter for snoozed notifications
        if snoozed:
            notifications = notifications.filter(
                Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False)
            )
        else:
            notifications = notifications.filter(
                Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
            )

        # Filter for archived or unarchive
        if archived:
            notifications = notifications.filter(archived_at__isnull=False)
        else:
            notifications = notifications.filter(archived_at__isnull=True)

        # Subscribed issues
        if type == "watching":
            issue_ids = IssueSubscriber.objects.filter(
                workspace__slug=slug, subscriber_id=request.user.id
            ).values_list("issue_id", flat=True)
            notifications = notifications.filter(entity_identifier__in=issue_ids)

        # Assigned Issues
        if type == "assigned":
            issue_ids = IssueAssignee.objects.filter(
                workspace__slug=slug, assignee_id=request.user.id
            ).values_list("issue_id", flat=True)
            notifications = notifications.filter(entity_identifier__in=issue_ids)

        # Created issues
        if type == "created":
            if WorkspaceMember.objects.filter(
                workspace__slug=slug,
                member=request.user,
                role__lt=15,
                is_active=True,
            ).exists():
                notifications = Notification.objects.none()
            else:
                issue_ids = Issue.objects.filter(
                    workspace__slug=slug, created_by=request.user
                ).values_list("pk", flat=True)
                notifications = notifications.filter(entity_identifier__in=issue_ids)

        updated_notifications = []
        for notification in notifications:
            notification.read_at = timezone.now()
            updated_notifications.append(notification)
        Notification.objects.bulk_update(
            updated_notifications, ["read_at"], batch_size=100
        )
        return Response({"message": "Successful"}, status=status.HTTP_200_OK)

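# Illustrative sketch only, not part of the diff: bulk_update(..., batch_size=100)
# above chunks the UPDATE statements into batches of 100 rows. The chunking
# itself reduces to:
def chunks(rows, batch_size):
    for start in range(0, len(rows), batch_size):
        yield rows[start:start + batch_size]

print(list(chunks(list(range(7)), 3)))  # [[0, 1, 2], [3, 4, 5], [6]]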
@@ -2,6 +2,7 @@
import uuid
import requests
import os
from requests.exceptions import RequestException

# Django imports
from django.utils import timezone

@@ -11,16 +12,23 @@ from django.conf import settings
from rest_framework.response import Response
from rest_framework import exceptions
from rest_framework.permissions import AllowAny
from rest_framework.views import APIView
from rest_framework_simplejwt.tokens import RefreshToken
from rest_framework import status
from sentry_sdk import capture_exception

# sso authentication
from google.oauth2 import id_token
from google.auth.transport import requests as google_auth_request

# Module imports
from plane.db.models import (
    SocialLoginConnection,
    User,
    WorkspaceMemberInvite,
    WorkspaceMember,
    ProjectMemberInvite,
    ProjectMember,
)
from plane.api.serializers import UserSerializer
from .base import BaseAPIView

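# Illustrative sketch only, not part of the diff: the google.oauth2 and
# google.auth imports above are typically used to validate a client-supplied
# Google id_token before trusting its email claim. A minimal call, where
# client_id is a placeholder for the app's OAuth client id, looks like:
from google.oauth2 import id_token
from google.auth.transport import requests as google_auth_request

def verify_google_token(token, client_id):
    # Raises ValueError when the token is expired, malformed, or issued
    # for a different audience; returns the decoded claims otherwise.
    return id_token.verify_oauth2_token(token, google_auth_request.Request(), client_id)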
@@ -112,7 +120,7 @@ def get_user_data(access_token: str) -> dict:
        url="https://api.github.com/user/emails", headers=headers
    ).json()

    _ = [
        user_data.update({"email": item.get("email")})
        for item in response
        if item.get("primary") is True
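# Illustrative sketch only, not part of the diff: GitHub's /user/emails
# endpoint returns a list of {"email", "primary", "verified", ...} dicts, and
# the comprehension above keeps the primary address. A more direct equivalent:
def primary_email(emails):
    return next((item.get("email") for item in emails if item.get("primary")), None)

print(primary_email([{"email": "a@example.com", "primary": False},
                     {"email": "b@example.com", "primary": True}]))  # b@example.com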
@@ -146,7 +154,7 @@ class OauthEndpoint(BaseAPIView):
            data = get_user_data(access_token)

            email = data.get("email", None)
            if email is None:
                return Response(
                    {
                        "error": "Something went wrong. Please try again later or contact the support team."

@@ -157,7 +165,6 @@ class OauthEndpoint(BaseAPIView):
            if "@" in email:
                user = User.objects.get(email=email)
                email = data["email"]
                channel = "email"
                mobile_number = uuid.uuid4().hex
                email_verified = True
            else:

@@ -169,7 +176,6 @@ class OauthEndpoint(BaseAPIView):
                )

            ## Login Case
            if not user.is_active:
                return Response(
                    {

@@ -181,20 +187,66 @@ class OauthEndpoint(BaseAPIView):
            user.last_active = timezone.now()
            user.last_login_time = timezone.now()
            user.last_login_ip = request.META.get("REMOTE_ADDR")
            user.last_login_medium = "oauth"
            user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
            user.is_email_verified = email_verified
            user.save()

            # Check if user has any accepted invites for workspace and add them to workspace
            workspace_member_invites = WorkspaceMemberInvite.objects.filter(
                email=user.email, accepted=True
            )

            WorkspaceMember.objects.bulk_create(
                [
                    WorkspaceMember(
                        workspace_id=workspace_member_invite.workspace_id,
                        member=user,
                        role=workspace_member_invite.role,
                    )
                    for workspace_member_invite in workspace_member_invites
                ],
                ignore_conflicts=True,
            )

            # Check if user has any project invites
            project_member_invites = ProjectMemberInvite.objects.filter(
                email=user.email, accepted=True
            )

            # Add user to workspace
            WorkspaceMember.objects.bulk_create(
                [
                    WorkspaceMember(
                        workspace_id=project_member_invite.workspace_id,
                        role=project_member_invite.role
                        if project_member_invite.role in [5, 10, 15]
                        else 15,
                        member=user,
                        created_by_id=project_member_invite.created_by_id,
                    )
                    for project_member_invite in project_member_invites
                ],
                ignore_conflicts=True,
            )

            # Now add the users to project
            ProjectMember.objects.bulk_create(
                [
                    ProjectMember(
                        workspace_id=project_member_invite.workspace_id,
                        role=project_member_invite.role
                        if project_member_invite.role in [5, 10, 15]
                        else 15,
                        member=user,
                        created_by_id=project_member_invite.created_by_id,
                    )
                    for project_member_invite in project_member_invites
                ],
                ignore_conflicts=True,
            )

            # Delete all the invites
            workspace_member_invites.delete()
            project_member_invites.delete()

            SocialLoginConnection.objects.update_or_create(
                medium=medium,
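# Illustrative sketch only, not part of the diff: invited project members may
# carry any role value, so the bulk_create above clamps unknown roles to 15
# (the lowest-privilege role in the allowed set). Equivalent helper:
def clamp_role(role, allowed=(5, 10, 15), default=15):
    return role if role in allowed else default

print(clamp_role(10), clamp_role(20))  # 10 15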
@@ -205,26 +257,36 @@ class OauthEndpoint(BaseAPIView):
                    "last_login_at": timezone.now(),
                },
            )

            try:
                if settings.ANALYTICS_BASE_API:
                    _ = requests.post(
                        settings.ANALYTICS_BASE_API,
                        headers={
                            "Content-Type": "application/json",
                            "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
                        },
                        json={
                            "event_id": uuid.uuid4().hex,
                            "event_data": {
                                "medium": f"oauth-{medium}",
                            },
                            "user": {"email": email, "id": str(user.id)},
                            "device_ctx": {
                                "ip": request.META.get("REMOTE_ADDR"),
                                "user_agent": request.META.get("HTTP_USER_AGENT"),
                            },
                            "event_type": "SIGN_IN",
                        },
                    )
            except RequestException as e:
                capture_exception(e)

            access_token, refresh_token = get_tokens_for_user(user)

            data = {
                "access_token": access_token,
                "refresh_token": refresh_token,
            }
            return Response(data, status=status.HTTP_200_OK)

        except User.DoesNotExist:
@@ -235,7 +297,6 @@ class OauthEndpoint(BaseAPIView):
            if "@" in email:
                email = data["email"]
                mobile_number = uuid.uuid4().hex
                channel = "email"
                email_verified = True
            else:
                return Response(
@@ -264,35 +325,86 @@ class OauthEndpoint(BaseAPIView):
            user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
            user.token_updated_at = timezone.now()
            user.save()

            # Check if user has any accepted invites for workspace and add them to workspace
            workspace_member_invites = WorkspaceMemberInvite.objects.filter(
                email=user.email, accepted=True
            )

            WorkspaceMember.objects.bulk_create(
                [
                    WorkspaceMember(
                        workspace_id=workspace_member_invite.workspace_id,
                        member=user,
                        role=workspace_member_invite.role,
                    )
                    for workspace_member_invite in workspace_member_invites
                ],
                ignore_conflicts=True,
            )

            # Check if user has any project invites
            project_member_invites = ProjectMemberInvite.objects.filter(
                email=user.email, accepted=True
            )

            # Add user to workspace
            WorkspaceMember.objects.bulk_create(
                [
                    WorkspaceMember(
                        workspace_id=project_member_invite.workspace_id,
                        role=project_member_invite.role
                        if project_member_invite.role in [5, 10, 15]
                        else 15,
                        member=user,
                        created_by_id=project_member_invite.created_by_id,
                    )
                    for project_member_invite in project_member_invites
                ],
                ignore_conflicts=True,
            )

            # Now add the users to project
            ProjectMember.objects.bulk_create(
                [
                    ProjectMember(
                        workspace_id=project_member_invite.workspace_id,
                        role=project_member_invite.role
                        if project_member_invite.role in [5, 10, 15]
                        else 15,
                        member=user,
                        created_by_id=project_member_invite.created_by_id,
                    )
                    for project_member_invite in project_member_invites
                ],
                ignore_conflicts=True,
            )

            # Delete all the invites
            workspace_member_invites.delete()
            project_member_invites.delete()

            try:
                if settings.ANALYTICS_BASE_API:
                    _ = requests.post(
                        settings.ANALYTICS_BASE_API,
                        headers={
                            "Content-Type": "application/json",
                            "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
                        },
                        json={
                            "event_id": uuid.uuid4().hex,
                            "event_data": {
                                "medium": f"oauth-{medium}",
                            },
                            "user": {"email": email, "id": str(user.id)},
                            "device_ctx": {
                                "ip": request.META.get("REMOTE_ADDR"),
                                "user_agent": request.META.get("HTTP_USER_AGENT"),
                            },
                            "event_type": "SIGN_UP",
                        },
                    )
            except RequestException as e:
                capture_exception(e)

            SocialLoginConnection.objects.update_or_create(
                medium=medium,

@@ -303,12 +415,10 @@ class OauthEndpoint(BaseAPIView):
                    "last_login_at": timezone.now(),
                },
            )

            access_token, refresh_token = get_tokens_for_user(user)
            data = {
                "access_token": access_token,
                "refresh_token": refresh_token,
            }
            return Response(data, status=status.HTTP_201_CREATED)
        except Exception as e:
            capture_exception(e)
            return Response(
                {
                    "error": "Something went wrong. Please try again later or contact the support team."
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

@@ -1,8 +1,7 @@
# Python imports
from datetime import timedelta, date

# Django imports
from django.db import IntegrityError
from django.db.models import Exists, OuterRef, Q, Prefetch
from django.utils import timezone

@@ -78,104 +77,82 @@ class PageViewSet(BaseViewSet):
        )

    def create(self, request, slug, project_id):
        serializer = PageSerializer(
            data=request.data,
            context={"project_id": project_id, "owned_by_id": request.user.id},
        )

        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def partial_update(self, request, slug, project_id, pk):
        page = Page.objects.get(pk=pk, workspace__slug=slug, project_id=project_id)
        # Only update access if the page owner is the requesting user
        if (
            page.access != request.data.get("access", page.access)
            and page.owned_by_id != request.user.id
        ):
            return Response(
                {
                    "error": "Access cannot be updated since this page is owned by someone else"
                },
                status=status.HTTP_400_BAD_REQUEST,
            )
        serializer = PageSerializer(page, data=request.data, partial=True)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def list(self, request, slug, project_id):
        queryset = self.get_queryset()
        page_view = request.GET.get("page_view", False)

        if not page_view:
            return Response({"error": "Page View parameter is required"}, status=status.HTTP_400_BAD_REQUEST)

        # All Pages
        if page_view == "all":
            return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)

        # Recent pages
        if page_view == "recent":
            current_time = date.today()
            day_before = current_time - timedelta(days=1)
            todays_pages = queryset.filter(updated_at__date=date.today())
            yesterdays_pages = queryset.filter(updated_at__date=day_before)
            earlier_this_week = queryset.filter(
                updated_at__date__range=(
                    (timezone.now() - timedelta(days=7)),
                    (timezone.now() - timedelta(days=2)),
                )
            )
            return Response(
                {
                    "today": PageSerializer(todays_pages, many=True).data,
                    "yesterday": PageSerializer(yesterdays_pages, many=True).data,
                    "earlier_this_week": PageSerializer(earlier_this_week, many=True).data,
                },
                status=status.HTTP_200_OK,
            )

        # Favorite Pages
        if page_view == "favorite":
            queryset = queryset.filter(is_favorite=True)
            return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)

        # My pages
        if page_view == "created_by_me":
            queryset = queryset.filter(owned_by=request.user)
            return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)

        # Created by other Pages
        if page_view == "created_by_other":
            queryset = queryset.filter(~Q(owned_by=request.user), access=0)
            return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)

        return Response({"error": "No matching view found"}, status=status.HTTP_400_BAD_REQUEST)

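# Illustrative sketch only, not part of the diff: the "recent" view above
# buckets pages by how long ago they were updated. The date windows reduce to
# the following pure-Python helper (inclusive ranges, matching
# updated_at__date__range semantics):
from datetime import date, timedelta

def recent_buckets(today=None):
    today = today or date.today()
    return {
        "today": (today, today),
        "yesterday": (today - timedelta(days=1), today - timedelta(days=1)),
        "earlier_this_week": (today - timedelta(days=7), today - timedelta(days=2)),
    }

print(recent_buckets(date(2023, 8, 7)))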
class PageBlockViewSet(BaseViewSet):
    serializer_class = PageBlockSerializer

@@ -225,53 +202,21 @@ class PageFavoriteViewSet(BaseViewSet):
        )

    def create(self, request, slug, project_id):
        serializer = PageFavoriteSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save(user=request.user, project_id=project_id)
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def destroy(self, request, slug, project_id, page_id):
        page_favorite = PageFavorite.objects.get(
            project=project_id,
            user=request.user,
            workspace__slug=slug,
            page_id=page_id,
        )
        page_favorite.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)


class CreateIssueFromPageBlockEndpoint(BaseAPIView):
    permission_classes = [

@@ -279,43 +224,32 @@ class CreateIssueFromPageBlockEndpoint(BaseAPIView):
    ]

    def post(self, request, slug, project_id, page_id, page_block_id):
        page_block = PageBlock.objects.get(
            pk=page_block_id,
            workspace__slug=slug,
            project_id=project_id,
            page_id=page_id,
        )
        issue = Issue.objects.create(
            name=page_block.name,
            project_id=project_id,
            description=page_block.description,
            description_html=page_block.description_html,
            description_stripped=page_block.description_stripped,
        )
        _ = IssueAssignee.objects.create(
            issue=issue, assignee=request.user, project_id=project_id
        )

        _ = IssueActivity.objects.create(
            issue=issue,
            actor=request.user,
            project_id=project_id,
            comment=f"created the issue from {page_block.name} block",
            verb="created",
        )

        page_block.issue = issue
        page_block.save()

        return Response(IssueLiteSerializer(issue).data, status=status.HTTP_200_OK)

@@ -1,131 +0,0 @@
# Third party imports
from rest_framework.response import Response
from rest_framework import status

from sentry_sdk import capture_exception

# Module imports
from plane.api.serializers import (
    UserSerializer,
    IssueActivitySerializer,
)

from plane.api.views.base import BaseViewSet, BaseAPIView
from plane.db.models import (
    User,
    Workspace,
    WorkspaceMemberInvite,
    Issue,
    IssueActivity,
    WorkspaceMember,
)
from plane.utils.paginator import BasePaginator


class UserEndpoint(BaseViewSet):
    serializer_class = UserSerializer
    model = User

    def get_object(self):
        return self.request.user

    def retrieve(self, request):
        try:
            workspace = Workspace.objects.get(
                pk=request.user.last_workspace_id, workspace_member__member=request.user
            )
            workspace_invites = WorkspaceMemberInvite.objects.filter(
                email=request.user.email
            ).count()
            assigned_issues = Issue.issue_objects.filter(assignees__in=[request.user]).count()

            serialized_data = UserSerializer(request.user).data
            serialized_data["workspace"] = {
                "last_workspace_id": request.user.last_workspace_id,
                "last_workspace_slug": workspace.slug,
                "fallback_workspace_id": request.user.last_workspace_id,
                "fallback_workspace_slug": workspace.slug,
                "invites": workspace_invites,
            }
            serialized_data.setdefault("issues", {})["assigned_issues"] = assigned_issues

            return Response(
                serialized_data,
                status=status.HTTP_200_OK,
            )
        except Workspace.DoesNotExist:
            # This exception will be hit even when the `last_workspace_id` is None

            workspace_invites = WorkspaceMemberInvite.objects.filter(
                email=request.user.email
            ).count()
            assigned_issues = Issue.issue_objects.filter(assignees__in=[request.user]).count()

            fallback_workspace = (
                Workspace.objects.filter(workspace_member__member=request.user)
                .order_by("created_at")
                .first()
            )

            serialized_data = UserSerializer(request.user).data

            serialized_data["workspace"] = {
                "last_workspace_id": None,
                "last_workspace_slug": None,
                "fallback_workspace_id": fallback_workspace.id
                if fallback_workspace is not None
                else None,
                "fallback_workspace_slug": fallback_workspace.slug
                if fallback_workspace is not None
                else None,
                "invites": workspace_invites,
            }
            serialized_data.setdefault("issues", {})["assigned_issues"] = assigned_issues

            return Response(
                serialized_data,
                status=status.HTTP_200_OK,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )


class UpdateUserOnBoardedEndpoint(BaseAPIView):
    def patch(self, request):
        try:
            user = User.objects.get(pk=request.user.id)
            user.is_onboarded = request.data.get("is_onboarded", False)
            user.save()
            return Response(
                {"message": "Updated successfully"}, status=status.HTTP_200_OK
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )


class UserActivityEndpoint(BaseAPIView, BasePaginator):
    def get(self, request):
        try:
            queryset = IssueActivity.objects.filter(actor=request.user).select_related(
                "actor", "workspace"
            )

            return self.paginate(
                request=request,
                queryset=queryset,
                on_results=lambda issue_activities: IssueActivitySerializer(
                    issue_activities, many=True
                ).data,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )
File diff suppressed because it is too large
@@ -1,21 +0,0 @@
# Third party imports
from rest_framework.response import Response
from rest_framework import status
from sentry_sdk import capture_exception

# Module imports
from .base import BaseAPIView
from plane.utils.integrations.github import get_release_notes


class ReleaseNotesEndpoint(BaseAPIView):
    def get(self, request):
        try:
            release_notes = get_release_notes()
            return Response(release_notes, status=status.HTTP_200_OK)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )
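# Illustrative sketch only, not part of the diff: get_release_notes lives in
# plane.utils.integrations.github and its implementation is not shown here.
# Fetching release metadata from the public GitHub API (the repo path and
# field selection are assumptions) looks roughly like:
import requests

def fetch_release_notes(repo="makeplane/plane", count=5):
    response = requests.get(
        f"https://api.github.com/repos/{repo}/releases",
        params={"per_page": count},
        timeout=10,
    )
    response.raise_for_status()
    return [{"tag_name": r["tag_name"], "body": r["body"]} for r in response.json()]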
@@ -20,7 +20,7 @@ class GlobalSearchEndpoint(BaseAPIView):
    also show related workspace if found
    """

    def filter_workspaces(self, query, slug, project_id, workspace_search):
        fields = ["name"]
        q = Q()
        for field in fields:

@@ -31,8 +31,8 @@ class GlobalSearchEndpoint(BaseAPIView):
            .values("name", "id", "slug")
        )

    def filter_projects(self, query, slug, project_id, workspace_search):
        fields = ["name", "identifier"]
        q = Q()
        for field in fields:
            q |= Q(**{f"{field}__icontains": query})

@@ -46,8 +46,8 @@ class GlobalSearchEndpoint(BaseAPIView):
            .values("name", "id", "identifier", "workspace__slug")
        )

    def filter_issues(self, query, slug, project_id, workspace_search):
        fields = ["name", "sequence_id", "project__identifier"]
        q = Q()
        for field in fields:
            if field == "sequence_id":
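# Illustrative sketch only, not part of the diff: each filter_* method ORs one
# icontains predicate per field into a single Q. The same Q can be built
# functionally (Django installed, no project settings needed to construct Q
# nodes):
from functools import reduce
from operator import or_
from django.db.models import Q

fields = ["name", "identifier"]
query = "web"
q = reduce(or_, (Q(**{f"{field}__icontains": query}) for field in fields))
print(q)  # (OR: ('name__icontains', 'web'), ('identifier__icontains', 'web'))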
@@ -56,223 +56,219 @@ class GlobalSearchEndpoint(BaseAPIView):
                q |= Q(**{"sequence_id": sequence_id})
            else:
                q |= Q(**{f"{field}__icontains": query})

        issues = Issue.issue_objects.filter(
            q,
            project__project_projectmember__member=self.request.user,
            workspace__slug=slug,
        )

        if workspace_search == "false" and project_id:
            issues = issues.filter(project_id=project_id)

        return issues.distinct().values(
            "name",
            "id",
            "sequence_id",
            "project__identifier",
            "project_id",
            "workspace__slug",
        )

    def filter_cycles(self, query, slug, project_id, workspace_search):
        fields = ["name"]
        q = Q()
        for field in fields:
            q |= Q(**{f"{field}__icontains": query})

        cycles = Cycle.objects.filter(
            q,
            project__project_projectmember__member=self.request.user,
            workspace__slug=slug,
        )

        if workspace_search == "false" and project_id:
            cycles = cycles.filter(project_id=project_id)

        return cycles.distinct().values(
            "name",
            "id",
            "project_id",
            "project__identifier",
            "workspace__slug",
        )

    def filter_modules(self, query, slug, project_id, workspace_search):
        fields = ["name"]
        q = Q()
        for field in fields:
            q |= Q(**{f"{field}__icontains": query})

        modules = Module.objects.filter(
            q,
            project__project_projectmember__member=self.request.user,
            workspace__slug=slug,
        )

        if workspace_search == "false" and project_id:
            modules = modules.filter(project_id=project_id)

        return modules.distinct().values(
            "name",
            "id",
            "project_id",
            "project__identifier",
            "workspace__slug",
        )

    def filter_pages(self, query, slug, project_id, workspace_search):
        fields = ["name"]
        q = Q()
        for field in fields:
            q |= Q(**{f"{field}__icontains": query})

        pages = Page.objects.filter(
            q,
            project__project_projectmember__member=self.request.user,
            workspace__slug=slug,
        )

        if workspace_search == "false" and project_id:
            pages = pages.filter(project_id=project_id)

        return pages.distinct().values(
            "name",
            "id",
            "project_id",
            "project__identifier",
            "workspace__slug",
        )

    def filter_views(self, query, slug, project_id, workspace_search):
        fields = ["name"]
        q = Q()
        for field in fields:
            q |= Q(**{f"{field}__icontains": query})

        issue_views = IssueView.objects.filter(
            q,
            project__project_projectmember__member=self.request.user,
            workspace__slug=slug,
        )

        if workspace_search == "false" and project_id:
            issue_views = issue_views.filter(project_id=project_id)

        return issue_views.distinct().values(
            "name",
            "id",
            "project_id",
            "project__identifier",
            "workspace__slug",
        )

    def get(self, request, slug, project_id):
        try:
            query = request.query_params.get("search", False)
            if not query:
                return Response(
                    {
                        "results": {
                            "workspace": [],
                            "project": [],
                            "issue": [],
                            "cycle": [],
                            "module": [],
                            "issue_view": [],
                            "page": [],
                        }
                    },
                    status=status.HTTP_200_OK,
                )

            MODELS_MAPPER = {
                "workspace": self.filter_workspaces,
                "project": self.filter_projects,
                "issue": self.filter_issues,
                "cycle": self.filter_cycles,
                "module": self.filter_modules,
                "issue_view": self.filter_views,
                "page": self.filter_pages,
            }

            results = {}

    def get(self, request, slug):
|
||||
query = request.query_params.get("search", False)
|
||||
workspace_search = request.query_params.get("workspace_search", "false")
|
||||
project_id = request.query_params.get("project_id", False)
|
||||
|
||||
for model in MODELS_MAPPER.keys():
|
||||
func = MODELS_MAPPER.get(model, None)
|
||||
results[model] = func(query, slug, project_id)
|
||||
return Response({"results": results}, status=status.HTTP_200_OK)
|
||||
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
if not query:
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
{
|
||||
"results": {
|
||||
"workspace": [],
|
||||
"project": [],
|
||||
"issue": [],
|
||||
"cycle": [],
|
||||
"module": [],
|
||||
"issue_view": [],
|
||||
"page": [],
|
||||
}
|
||||
},
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
MODELS_MAPPER = {
|
||||
"workspace": self.filter_workspaces,
|
||||
"project": self.filter_projects,
|
||||
"issue": self.filter_issues,
|
||||
"cycle": self.filter_cycles,
|
||||
"module": self.filter_modules,
|
||||
"issue_view": self.filter_views,
|
||||
"page": self.filter_pages,
|
||||
}
|
||||
|
||||
results = {}
|
||||
|
||||
for model in MODELS_MAPPER.keys():
|
||||
func = MODELS_MAPPER.get(model, None)
|
||||
results[model] = func(query, slug, project_id, workspace_search)
|
||||
return Response({"results": results}, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
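For illustration, the reworked GlobalSearchEndpoint above might be called as in the following minimal sketch. Only the query parameters (search, project_id, workspace_search), the move of project_id from the URL into the query string, and the response shape come from the diff; the base URL, route path, auth header, and use of the requests library are assumptions.

# Hypothetical usage sketch: route path and auth scheme are assumptions;
# only the query params and response keys are taken from the diff above.
import requests

BASE_URL = "https://plane.example.com/api"  # assumed host/prefix
HEADERS = {"Authorization": "Bearer <token>"}  # placeholder credential

# Project-scoped search (previous behaviour): workspace_search defaults
# to "false", so each filter_* helper narrows results to project_id.
project_scoped = requests.get(
    f"{BASE_URL}/workspaces/my-workspace/search/",
    params={"search": "onboarding", "project_id": "<project-uuid>"},
    headers=HEADERS,
)

# Workspace-wide search (new behaviour): with workspace_search="true",
# the project_id narrowing in filter_issues/filter_cycles/... is skipped.
workspace_wide = requests.get(
    f"{BASE_URL}/workspaces/my-workspace/search/",
    params={"search": "onboarding", "workspace_search": "true"},
    headers=HEADERS,
)

# The payload groups matches by entity, matching the MODELS_MAPPER keys:
# workspace, project, issue, cycle, module, issue_view, page.
print(workspace_wide.json()["results"].keys())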
 class IssueSearchEndpoint(BaseAPIView):
     def get(self, request, slug, project_id):
-        try:
-            query = request.query_params.get("search", False)
-            parent = request.query_params.get("parent", "false")
-            blocker_blocked_by = request.query_params.get("blocker_blocked_by", "false")
-            cycle = request.query_params.get("cycle", "false")
-            module = request.query_params.get("module", "false")
-            sub_issue = request.query_params.get("sub_issue", "false")
+        query = request.query_params.get("search", False)
+        workspace_search = request.query_params.get("workspace_search", "false")
+        parent = request.query_params.get("parent", "false")
+        issue_relation = request.query_params.get("issue_relation", "false")
+        cycle = request.query_params.get("cycle", "false")
+        module = request.query_params.get("module", "false")
+        sub_issue = request.query_params.get("sub_issue", "false")
 
-            issue_id = request.query_params.get("issue_id", False)
+        issue_id = request.query_params.get("issue_id", False)
 
-            issues = Issue.issue_objects.filter(
-                workspace__slug=slug,
-                project_id=project_id,
-                project__project_projectmember__member=self.request.user,
-            )
+        issues = Issue.issue_objects.filter(
+            workspace__slug=slug,
+            project__project_projectmember__member=self.request.user,
+        )
 
-            if query:
-                issues = search_issues(query, issues)
+        if workspace_search == "false":
+            issues = issues.filter(project_id=project_id)
 
-            if parent == "true" and issue_id:
-                issue = Issue.issue_objects.get(pk=issue_id)
-                issues = issues.filter(
-                    ~Q(pk=issue_id), ~Q(pk=issue.parent_id), parent__isnull=True
-                ).exclude(
-                    pk__in=Issue.issue_objects.filter(parent__isnull=False).values_list(
-                        "parent_id", flat=True
-                    )
-                )
-            if blocker_blocked_by == "true" and issue_id:
-                issue = Issue.issue_objects.get(pk=issue_id)
-                issues = issues.filter(
-                    ~Q(pk=issue_id),
-                    ~Q(blocked_issues__block=issue),
-                    ~Q(blocker_issues__blocked_by=issue),
-                )
-            if sub_issue == "true" and issue_id:
-                issue = Issue.issue_objects.get(pk=issue_id)
-                issues = issues.filter(~Q(pk=issue_id), parent__isnull=True)
-                if issue.parent:
-                    issues = issues.filter(~Q(pk=issue.parent_id))
+        if query:
+            issues = search_issues(query, issues)
 
-            if cycle == "true":
-                issues = issues.exclude(issue_cycle__isnull=False)
+        if parent == "true" and issue_id:
+            issue = Issue.issue_objects.get(pk=issue_id)
+            issues = issues.filter(
+                ~Q(pk=issue_id), ~Q(pk=issue.parent_id), parent__isnull=True
+            ).exclude(
+                pk__in=Issue.issue_objects.filter(parent__isnull=False).values_list(
                    "parent_id", flat=True
+                )
+            )
+        if issue_relation == "true" and issue_id:
+            issue = Issue.issue_objects.get(pk=issue_id)
+            issues = issues.filter(
+                ~Q(pk=issue_id),
+                ~Q(issue_related__issue=issue),
+                ~Q(issue_relation__related_issue=issue),
+            )
+        if sub_issue == "true" and issue_id:
+            issue = Issue.issue_objects.get(pk=issue_id)
+            issues = issues.filter(~Q(pk=issue_id), parent__isnull=True)
+            if issue.parent:
+                issues = issues.filter(~Q(pk=issue.parent_id))
 
-            if module == "true":
-                issues = issues.exclude(issue_module__isnull=False)
+        if cycle == "true":
+            issues = issues.exclude(issue_cycle__isnull=False)
 
-            return Response(
-                issues.values(
-                    "name",
-                    "id",
-                    "sequence_id",
-                    "project__identifier",
-                    "project_id",
-                    "workspace__slug",
-                    "state__name",
-                    "state__group",
-                    "state__color",
-                ),
-                status=status.HTTP_200_OK,
-            )
-        except Issue.DoesNotExist:
-            return Response(
-                {"error": "Issue Does not exist"}, status=status.HTTP_400_BAD_REQUEST
-            )
-        except Exception as e:
-            print(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
+        if module == "true":
+            issues = issues.exclude(issue_module__isnull=False)
+
+        return Response(
+            issues.values(
+                "name",
+                "id",
+                "sequence_id",
+                "project__name",
+                "project__identifier",
+                "project_id",
+                "workspace__slug",
+                "state__name",
+                "state__group",
+                "state__color",
+            ),
+            status=status.HTTP_200_OK,
+        )
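The IssueSearchEndpoint change follows the same pattern: the blocker_blocked_by flag is replaced by issue_relation, and workspace_search lets the lookup escape the URL's project_id. A minimal sketch under the same assumptions as above (route path, host, auth, and requests usage are illustrative, not from the diff):

# Hypothetical usage sketch: route path and auth are assumptions; the
# query params and returned fields are taken from the diff above.
import requests

BASE_URL = "https://plane.example.com/api"  # assumed host/prefix
HEADERS = {"Authorization": "Bearer <token>"}  # placeholder credential

# Find candidate issues to relate to an existing issue: issue_relation="true"
# excludes the issue itself and anything already linked through the
# issue_related / issue_relation reverse relations.
resp = requests.get(
    f"{BASE_URL}/workspaces/my-workspace/projects/<project-uuid>/search-issues/",
    params={
        "search": "login",
        "issue_relation": "true",
        "issue_id": "<issue-uuid>",
        "workspace_search": "false",  # keep results inside project_id
    },
    headers=HEADERS,
)

# Each row now also carries project__name alongside the earlier fields.
for issue in resp.json():
    print(issue["project__identifier"], issue["sequence_id"], issue["name"])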
Some files were not shown because too many files have changed in this diff.