Mirror of https://github.com/makeplane/plane (synced 2025-08-07 19:59:33 +00:00)

Compare commits: fix-migrat ... release-0. (537 commits)
[Commit table omitted: 537 commit rows whose Author and Date columns were lost in extraction; only bare SHA1 hashes survived.]
@@ -8,6 +8,13 @@ PGDATA="/var/lib/postgresql/data"
 REDIS_HOST="plane-redis"
 REDIS_PORT="6379"
 
+# RabbitMQ Settings
+RABBITMQ_HOST="plane-mq"
+RABBITMQ_PORT="5672"
+RABBITMQ_USER="plane"
+RABBITMQ_PASSWORD="plane"
+RABBITMQ_VHOST="plane"
+
 # AWS Settings
 AWS_REGION=""
 AWS_ACCESS_KEY_ID="access-key"
@@ -1,59 +0,0 @@
-/**
- * Adds three new lint plugins over the existing configuration:
- * This is used to lint staged files only.
- * We should remove this file once the entire codebase follows these rules.
- */
-module.exports = {
-  root: true,
-  extends: [
-    "custom",
-  ],
-  parser: "@typescript-eslint/parser",
-  settings: {
-    "import/resolver": {
-      typescript: {},
-      node: {
-        moduleDirectory: ["node_modules", "."],
-      },
-    },
-  },
-  rules: {
-    "import/order": [
-      "error",
-      {
-        groups: ["builtin", "external", "internal", "parent", "sibling"],
-        pathGroups: [
-          {
-            pattern: "react",
-            group: "external",
-            position: "before",
-          },
-          {
-            pattern: "lucide-react",
-            group: "external",
-            position: "after",
-          },
-          {
-            pattern: "@headlessui/**",
-            group: "external",
-            position: "after",
-          },
-          {
-            pattern: "@plane/**",
-            group: "external",
-            position: "after",
-          },
-          {
-            pattern: "@/**",
-            group: "internal",
-          },
-        ],
-        pathGroupsExcludedImportTypes: ["builtin", "internal", "react"],
-        alphabetize: {
-          order: "asc",
-          caseInsensitive: true,
-        },
-      },
-    ],
-  },
-};
10 .eslintrc.js

@@ -1,10 +0,0 @@
-module.exports = {
-  root: true,
-  // This tells ESLint to load the config from the package `eslint-config-custom`
-  extends: ["custom"],
-  settings: {
-    next: {
-      rootDir: ["web/", "space/", "admin/"],
-    },
-  },
-};
1 .gitattributes vendored Normal file

@@ -0,0 +1 @@
+*.sh text eol=lf
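For context (not part of the diff): with this attribute in place, Git normalizes shell scripts to LF line endings, which a quick check can confirm. A minimal sketch, assuming a script such as deploy/selfhost/install.sh exists in the working tree:

    # Hypothetical check from the repo root; git check-attr reports the attributes applied to a path.
    git check-attr text eol -- deploy/selfhost/install.sh
    # deploy/selfhost/install.sh: text: set
    # deploy/selfhost/install.sh: eol: lf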
126 .github/actions/build-push-ce/action.yml vendored Normal file

@@ -0,0 +1,126 @@
+name: "Build and Push Docker Image"
+description: "Reusable action for building and pushing Docker images"
+inputs:
+  docker-username:
+    description: "The Dockerhub username"
+    required: true
+  docker-token:
+    description: "The Dockerhub Token"
+    required: true
+
+  # Docker Image Options
+  docker-image-owner:
+    description: "The owner of the Docker image"
+    required: true
+  docker-image-name:
+    description: "The name of the Docker image"
+    required: true
+  build-context:
+    description: "The build context"
+    required: true
+    default: "."
+  dockerfile-path:
+    description: "The path to the Dockerfile"
+    required: true
+  build-args:
+    description: "The build arguments"
+    required: false
+    default: ""
+
+  # Buildx Options
+  buildx-driver:
+    description: "Buildx driver"
+    required: true
+    default: "docker-container"
+  buildx-version:
+    description: "Buildx version"
+    required: true
+    default: "latest"
+  buildx-platforms:
+    description: "Buildx platforms"
+    required: true
+    default: "linux/amd64"
+  buildx-endpoint:
+    description: "Buildx endpoint"
+    required: true
+    default: "default"
+
+  # Release Build Options
+  build-release:
+    description: "Flag to publish release"
+    required: false
+    default: "false"
+  build-prerelease:
+    description: "Flag to publish prerelease"
+    required: false
+    default: "false"
+  release-version:
+    description: "The release version"
+    required: false
+    default: "latest"
+
+runs:
+  using: "composite"
+  steps:
+    - name: Set Docker Tag
+      shell: bash
+      env:
+        IMG_OWNER: ${{ inputs.docker-image-owner }}
+        IMG_NAME: ${{ inputs.docker-image-name }}
+        BUILD_RELEASE: ${{ inputs.build-release }}
+        IS_PRERELEASE: ${{ inputs.build-prerelease }}
+        REL_VERSION: ${{ inputs.release-version }}
+      run: |
+        FLAT_BRANCH_VERSION=$(echo "${{ github.ref_name }}" | sed 's/[^a-zA-Z0-9.-]//g')
+
+        if [ "${{ env.BUILD_RELEASE }}" == "true" ]; then
+          semver_regex="^v([0-9]+)\.([0-9]+)\.([0-9]+)(-[a-zA-Z0-9]+(-[a-zA-Z0-9]+)*)?$"
+          if [[ ! ${{ env.REL_VERSION }} =~ $semver_regex ]]; then
+            echo "Invalid Release Version Format : ${{ env.REL_VERSION }}"
+            echo "Please provide a valid SemVer version"
+            echo "e.g. v1.2.3 or v1.2.3-alpha-1"
+            echo "Exiting the build process"
+            exit 1 # Exit with status 1 to fail the step
+          fi
+
+          TAG=${{ env.IMG_OWNER }}/${{ env.IMG_NAME }}:${{ env.REL_VERSION }}
+
+          if [ "${{ env.IS_PRERELEASE }}" != "true" ]; then
+            TAG=${TAG},${{ env.IMG_OWNER }}/${{ env.IMG_NAME }}:stable
+          fi
+        elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
+          TAG=${{ env.IMG_OWNER }}/${{ env.IMG_NAME }}:latest
+        else
+          TAG=${{ env.IMG_OWNER }}/${{ env.IMG_NAME }}:${FLAT_BRANCH_VERSION}
+        fi
+
+        echo "DOCKER_TAGS=${TAG}" >> $GITHUB_ENV
+    - name: Login to Docker Hub
+      uses: docker/login-action@v3
+      with:
+        username: ${{ inputs.docker-username }}
+        password: ${{ inputs.docker-token }}
+
+    - name: Set up Docker Buildx
+      uses: docker/setup-buildx-action@v3
+      with:
+        driver: ${{ inputs.buildx-driver }}
+        version: ${{ inputs.buildx-version }}
+        endpoint: ${{ inputs.buildx-endpoint }}
+
+    - name: Check out the repo
+      uses: actions/checkout@v4
+
+    - name: Build and Push Docker Image
+      uses: docker/build-push-action@v5.1.0
+      with:
+        context: ${{ inputs.build-context }}
+        file: ${{ inputs.dockerfile-path }}
+        platforms: ${{ inputs.buildx-platforms }}
+        tags: ${{ env.DOCKER_TAGS }}
+        push: true
+        build-args: ${{ inputs.build-args }}
+      env:
+        DOCKER_BUILDKIT: 1
+        DOCKER_USERNAME: ${{ inputs.docker-username }}
+        DOCKER_PASSWORD: ${{ inputs.docker-token }}
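As a side note (not part of the diff), the SemVer guard in the "Set Docker Tag" step above can be exercised on its own. A minimal sketch using the same regex, with hypothetical sample versions:

    # Minimal sketch of the regex the composite action uses to validate release versions.
    semver_regex="^v([0-9]+)\.([0-9]+)\.([0-9]+)(-[a-zA-Z0-9]+(-[a-zA-Z0-9]+)*)?$"
    for v in v1.2.3 v1.2.3-alpha-1 1.2.3 v1.2; do
      if [[ $v =~ $semver_regex ]]; then
        echo "accepted: $v"
      else
        echo "rejected: $v"
      fi
    done
    # accepted: v1.2.3 and v1.2.3-alpha-1; rejected: 1.2.3 (no leading v) and v1.2 (no patch part)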
4 .github/workflows/build-aio-base.yml vendored

@@ -83,7 +83,7 @@ jobs:
           endpoint: ${{ env.BUILDX_ENDPOINT }}

       - name: Build and Push to Docker Hub
-        uses: docker/build-push-action@v5.1.0
+        uses: docker/build-push-action@v6.9.0
         with:
           context: ./aio
           file: ./aio/Dockerfile-base-full

@@ -124,7 +124,7 @@ jobs:
           endpoint: ${{ env.BUILDX_ENDPOINT }}

       - name: Build and Push to Docker Hub
-        uses: docker/build-push-action@v5.1.0
+        uses: docker/build-push-action@v6.9.0
         with:
           context: ./aio
           file: ./aio/Dockerfile-base-slim
4 .github/workflows/build-aio-branch.yml vendored

@@ -128,7 +128,7 @@ jobs:
         uses: actions/checkout@v4

       - name: Build and Push to Docker Hub
-        uses: docker/build-push-action@v5.1.0
+        uses: docker/build-push-action@v6.9.0
         with:
           context: .
           file: ./aio/Dockerfile-app

@@ -188,7 +188,7 @@ jobs:
         uses: actions/checkout@v4

       - name: Build and Push to Docker Hub
-        uses: docker/build-push-action@v5.1.0
+        uses: docker/build-push-action@v6.9.0
         with:
           context: .
           file: ./aio/Dockerfile-app
536 .github/workflows/build-branch.yml vendored

@@ -1,21 +1,45 @@
-name: Branch Build
+name: Branch Build CE

 on:
   workflow_dispatch:
-  push:
-    branches:
-      - master
-      - preview
-  release:
-    types: [released, prereleased]
+    inputs:
+      build_type:
+        description: "Type of build to run"
+        required: true
+        type: choice
+        default: "Build"
+        options:
+          - "Build"
+          - "Release"
+      releaseVersion:
+        description: "Release Version"
+        type: string
+        default: v0.0.0
+      isPrerelease:
+        description: "Is Pre-release"
+        type: boolean
+        default: false
+        required: true
+      arm64:
+        description: "Build for ARM64 architecture"
+        required: false
+        default: false
+        type: boolean
+  # push:
+  #   branches:
+  #     - master

 env:
-  TARGET_BRANCH: ${{ github.ref_name || github.event.release.target_commitish }}
+  TARGET_BRANCH: ${{ github.ref_name }}
+  ARM64_BUILD: ${{ github.event.inputs.arm64 }}
+  BUILD_TYPE: ${{ github.event.inputs.build_type }}
+  RELEASE_VERSION: ${{ github.event.inputs.releaseVersion }}
+  IS_PRERELEASE: ${{ github.event.inputs.isPrerelease }}

 jobs:
   branch_build_setup:
     name: Build Setup
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     outputs:
       gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
       gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
@@ -27,12 +51,25 @@ jobs:
       build_admin: ${{ steps.changed_files.outputs.admin_any_changed }}
       build_space: ${{ steps.changed_files.outputs.space_any_changed }}
       build_web: ${{ steps.changed_files.outputs.web_any_changed }}
+      build_live: ${{ steps.changed_files.outputs.live_any_changed }}
+
+      dh_img_web: ${{ steps.set_env_variables.outputs.DH_IMG_WEB }}
+      dh_img_space: ${{ steps.set_env_variables.outputs.DH_IMG_SPACE }}
+      dh_img_admin: ${{ steps.set_env_variables.outputs.DH_IMG_ADMIN }}
+      dh_img_live: ${{ steps.set_env_variables.outputs.DH_IMG_LIVE }}
+      dh_img_backend: ${{ steps.set_env_variables.outputs.DH_IMG_BACKEND }}
+      dh_img_proxy: ${{ steps.set_env_variables.outputs.DH_IMG_PROXY }}
+
+      build_type: ${{steps.set_env_variables.outputs.BUILD_TYPE}}
+      build_release: ${{ steps.set_env_variables.outputs.BUILD_RELEASE }}
+      build_prerelease: ${{ steps.set_env_variables.outputs.BUILD_PRERELEASE }}
+      release_version: ${{ steps.set_env_variables.outputs.RELEASE_VERSION }}

     steps:
       - id: set_env_variables
         name: Set Environment Variables
         run: |
-          if [ "${{ env.TARGET_BRANCH }}" == "master" ] || [ "${{ github.event_name }}" == "release" ]; then
+          if [ "${{ env.ARM64_BUILD }}" == "true" ] || ([ "${{ env.BUILD_TYPE }}" == "Release" ] && [ "${{ env.IS_PRERELEASE }}" != "true" ]); then
             echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
             echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
             echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
@@ -43,7 +80,43 @@ jobs:
             echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
             echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
           fi
-          echo "TARGET_BRANCH=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT
+          BR_NAME=$( echo "${{ env.TARGET_BRANCH }}" |sed 's/[^a-zA-Z0-9.-]//g')
+          echo "TARGET_BRANCH=$BR_NAME" >> $GITHUB_OUTPUT
+
+          echo "DH_IMG_WEB=plane-frontend" >> $GITHUB_OUTPUT
+          echo "DH_IMG_SPACE=plane-space" >> $GITHUB_OUTPUT
+          echo "DH_IMG_ADMIN=plane-admin" >> $GITHUB_OUTPUT
+          echo "DH_IMG_LIVE=plane-live" >> $GITHUB_OUTPUT
+          echo "DH_IMG_BACKEND=plane-backend" >> $GITHUB_OUTPUT
+          echo "DH_IMG_PROXY=plane-proxy" >> $GITHUB_OUTPUT
+
+          echo "BUILD_TYPE=${{env.BUILD_TYPE}}" >> $GITHUB_OUTPUT
+          BUILD_RELEASE=false
+          BUILD_PRERELEASE=false
+          RELVERSION="latest"
+
+          if [ "${{ env.BUILD_TYPE }}" == "Release" ]; then
+            FLAT_RELEASE_VERSION=$(echo "${{ env.RELEASE_VERSION }}" | sed 's/[^a-zA-Z0-9.-]//g')
+            echo "FLAT_RELEASE_VERSION=${FLAT_RELEASE_VERSION}" >> $GITHUB_OUTPUT
+
+            semver_regex="^v([0-9]+)\.([0-9]+)\.([0-9]+)(-[a-zA-Z0-9]+(-[a-zA-Z0-9]+)*)?$"
+            if [[ ! $FLAT_RELEASE_VERSION =~ $semver_regex ]]; then
+              echo "Invalid Release Version Format : $FLAT_RELEASE_VERSION"
+              echo "Please provide a valid SemVer version"
+              echo "e.g. v1.2.3 or v1.2.3-alpha-1"
+              echo "Exiting the build process"
+              exit 1 # Exit with status 1 to fail the step
+            fi
+            BUILD_RELEASE=true
+            RELVERSION=$FLAT_RELEASE_VERSION
+
+            if [ "${{ env.IS_PRERELEASE }}" == "true" ]; then
+              BUILD_PRERELEASE=true
+            fi
+          fi
+          echo "BUILD_RELEASE=${BUILD_RELEASE}" >> $GITHUB_OUTPUT
+          echo "BUILD_PRERELEASE=${BUILD_PRERELEASE}" >> $GITHUB_OUTPUT
+          echo "RELEASE_VERSION=${RELVERSION}" >> $GITHUB_OUTPUT

       - id: checkout_files
         name: Checkout Files
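For illustration only (not part of the diff), the sed expression above strips every character that is not alphanumeric, a dot, or a hyphen, so a branch name is always safe to use as a Docker tag. A quick sketch with hypothetical branch names:

    # Hypothetical branch names run through the same sanitization:
    echo "feat/add-login" | sed 's/[^a-zA-Z0-9.-]//g'   # -> featadd-login
    echo "release-0.23"   | sed 's/[^a-zA-Z0-9.-]//g'   # -> release-0.23 (already clean)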
@@ -61,281 +134,250 @@ jobs:
             admin:
               - admin/**
               - packages/**
-              - 'package.json'
-              - 'yarn.lock'
-              - 'tsconfig.json'
-              - 'turbo.json'
+              - "package.json"
+              - "yarn.lock"
+              - "tsconfig.json"
+              - "turbo.json"
             space:
               - space/**
               - packages/**
-              - 'package.json'
-              - 'yarn.lock'
-              - 'tsconfig.json'
-              - 'turbo.json'
+              - "package.json"
+              - "yarn.lock"
+              - "tsconfig.json"
+              - "turbo.json"
             web:
               - web/**
               - packages/**
               - "package.json"
               - "yarn.lock"
               - "tsconfig.json"
               - "turbo.json"
+            live:
+              - live/**
+              - packages/**
+              - 'package.json'
+              - 'yarn.lock'
+              - 'tsconfig.json'
+              - 'turbo.json'
-  branch_build_push_web:
-    if: ${{ needs.branch_build_setup.outputs.build_web == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
-    runs-on: ubuntu-20.04
-    needs: [branch_build_setup]
-    env:
-      FRONTEND_TAG: makeplane/plane-frontend:${{ needs.branch_build_setup.outputs.gh_branch_name }}
-      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
-      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
-      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
-      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
-      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
-    steps:
-      - name: Set Frontend Docker Tag
-        run: |
-          if [ "${{ github.event_name }}" == "release" ]; then
-            TAG=makeplane/plane-frontend:stable,makeplane/plane-frontend:${{ github.event.release.tag_name }}
-          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
-            TAG=makeplane/plane-frontend:latest
-          else
-            TAG=${{ env.FRONTEND_TAG }}
-          fi
-          echo "FRONTEND_TAG=${TAG}" >> $GITHUB_ENV
-
-      - name: Login to Docker Hub
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-        with:
-          driver: ${{ env.BUILDX_DRIVER }}
-          version: ${{ env.BUILDX_VERSION }}
-          endpoint: ${{ env.BUILDX_ENDPOINT }}
-
-      - name: Check out the repo
-        uses: actions/checkout@v4
-
-      - name: Build and Push Frontend to Docker Container Registry
-        uses: docker/build-push-action@v5.1.0
-        with:
-          context: .
-          file: ./web/Dockerfile.web
-          platforms: ${{ env.BUILDX_PLATFORMS }}
-          tags: ${{ env.FRONTEND_TAG }}
-          push: true
-        env:
-          DOCKER_BUILDKIT: 1
-          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
-          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
   branch_build_push_admin:
-    if: ${{ needs.branch_build_setup.outputs.build_admin== 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+    if: ${{ needs.branch_build_setup.outputs.build_admin == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+    name: Build-Push Admin Docker Image
     runs-on: ubuntu-20.04
     needs: [branch_build_setup]
-    env:
-      ADMIN_TAG: makeplane/plane-admin:${{ needs.branch_build_setup.outputs.gh_branch_name }}
-      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
-      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
-      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
-      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
-      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
     steps:
-      - name: Set Admin Docker Tag
-        run: |
-          if [ "${{ github.event_name }}" == "release" ]; then
-            TAG=makeplane/plane-admin:stable,makeplane/plane-admin:${{ github.event.release.tag_name }}
-          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
-            TAG=makeplane/plane-admin:latest
-          else
-            TAG=${{ env.ADMIN_TAG }}
-          fi
-          echo "ADMIN_TAG=${TAG}" >> $GITHUB_ENV
-
-      - name: Login to Docker Hub
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-        with:
-          driver: ${{ env.BUILDX_DRIVER }}
-          version: ${{ env.BUILDX_VERSION }}
-          endpoint: ${{ env.BUILDX_ENDPOINT }}
-
-      - name: Check out the repo
+      - id: checkout_files
+        name: Checkout Files
         uses: actions/checkout@v4
-
-      - name: Build and Push Frontend to Docker Container Registry
-        uses: docker/build-push-action@v5.1.0
+      - name: Admin Build and Push
+        uses: ./.github/actions/build-push-ce
         with:
-          context: .
-          file: ./admin/Dockerfile.admin
-          platforms: ${{ env.BUILDX_PLATFORMS }}
-          tags: ${{ env.ADMIN_TAG }}
-          push: true
-        env:
-          DOCKER_BUILDKIT: 1
-          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
-          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+          build-release: ${{ needs.branch_build_setup.outputs.build_release }}
+          build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
+          release-version: ${{ needs.branch_build_setup.outputs.release_version }}
+          docker-username: ${{ secrets.DOCKERHUB_USERNAME }}
+          docker-token: ${{ secrets.DOCKERHUB_TOKEN }}
+          docker-image-owner: makeplane
+          docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_admin }}
+          build-context: .
+          dockerfile-path: ./admin/Dockerfile.admin
+          buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+          buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+          buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+          buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
+  branch_build_push_web:
+    if: ${{ needs.branch_build_setup.outputs.build_web == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+    name: Build-Push Web Docker Image
+    runs-on: ubuntu-20.04
+    needs: [branch_build_setup]
+    steps:
+      - id: checkout_files
+        name: Checkout Files
+        uses: actions/checkout@v4
+      - name: Web Build and Push
+        uses: ./.github/actions/build-push-ce
+        with:
+          build-release: ${{ needs.branch_build_setup.outputs.build_release }}
+          build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
+          release-version: ${{ needs.branch_build_setup.outputs.release_version }}
+          docker-username: ${{ secrets.DOCKERHUB_USERNAME }}
+          docker-token: ${{ secrets.DOCKERHUB_TOKEN }}
+          docker-image-owner: makeplane
+          docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_web }}
+          build-context: .
+          dockerfile-path: ./web/Dockerfile.web
+          buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+          buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+          buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+          buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
   branch_build_push_space:
-    if: ${{ needs.branch_build_setup.outputs.build_space == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+    if: ${{ needs.branch_build_setup.outputs.build_space == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+    name: Build-Push Space Docker Image
     runs-on: ubuntu-20.04
     needs: [branch_build_setup]
-    env:
-      SPACE_TAG: makeplane/plane-space:${{ needs.branch_build_setup.outputs.gh_branch_name }}
-      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
-      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
-      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
-      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
-      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
     steps:
-      - name: Set Space Docker Tag
-        run: |
-          if [ "${{ github.event_name }}" == "release" ]; then
-            TAG=makeplane/plane-space:stable,makeplane/plane-space:${{ github.event.release.tag_name }}
-          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
-            TAG=makeplane/plane-space:latest
-          else
-            TAG=${{ env.SPACE_TAG }}
-          fi
-          echo "SPACE_TAG=${TAG}" >> $GITHUB_ENV
-
-      - name: Login to Docker Hub
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-        with:
-          driver: ${{ env.BUILDX_DRIVER }}
-          version: ${{ env.BUILDX_VERSION }}
-          endpoint: ${{ env.BUILDX_ENDPOINT }}
-
-      - name: Check out the repo
+      - id: checkout_files
+        name: Checkout Files
         uses: actions/checkout@v4
-
-      - name: Build and Push Space to Docker Hub
-        uses: docker/build-push-action@v5.1.0
+      - name: Space Build and Push
+        uses: ./.github/actions/build-push-ce
         with:
-          context: .
-          file: ./space/Dockerfile.space
-          platforms: ${{ env.BUILDX_PLATFORMS }}
-          tags: ${{ env.SPACE_TAG }}
-          push: true
-        env:
-          DOCKER_BUILDKIT: 1
-          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
-          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+          build-release: ${{ needs.branch_build_setup.outputs.build_release }}
+          build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
+          release-version: ${{ needs.branch_build_setup.outputs.release_version }}
+          docker-username: ${{ secrets.DOCKERHUB_USERNAME }}
+          docker-token: ${{ secrets.DOCKERHUB_TOKEN }}
+          docker-image-owner: makeplane
+          docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_space }}
+          build-context: .
+          dockerfile-path: ./space/Dockerfile.space
+          buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+          buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+          buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+          buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
+  branch_build_push_live:
+    if: ${{ needs.branch_build_setup.outputs.build_live == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+    name: Build-Push Live Collaboration Docker Image
+    runs-on: ubuntu-20.04
+    needs: [branch_build_setup]
+    steps:
+      - id: checkout_files
+        name: Checkout Files
+        uses: actions/checkout@v4
+      - name: Live Build and Push
+        uses: ./.github/actions/build-push-ce
+        with:
+          build-release: ${{ needs.branch_build_setup.outputs.build_release }}
+          build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
+          release-version: ${{ needs.branch_build_setup.outputs.release_version }}
+          docker-username: ${{ secrets.DOCKERHUB_USERNAME }}
+          docker-token: ${{ secrets.DOCKERHUB_TOKEN }}
+          docker-image-owner: makeplane
+          docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_live }}
+          build-context: .
+          dockerfile-path: ./live/Dockerfile.live
+          buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+          buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+          buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+          buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
   branch_build_push_apiserver:
-    if: ${{ needs.branch_build_setup.outputs.build_apiserver == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+    if: ${{ needs.branch_build_setup.outputs.build_apiserver == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+    name: Build-Push API Server Docker Image
     runs-on: ubuntu-20.04
    needs: [branch_build_setup]
-    env:
-      BACKEND_TAG: makeplane/plane-backend:${{ needs.branch_build_setup.outputs.gh_branch_name }}
-      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
-      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
-      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
-      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
-      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
     steps:
-      - name: Set Backend Docker Tag
-        run: |
-          if [ "${{ github.event_name }}" == "release" ]; then
-            TAG=makeplane/plane-backend:stable,makeplane/plane-backend:${{ github.event.release.tag_name }}
-          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
-            TAG=makeplane/plane-backend:latest
-          else
-            TAG=${{ env.BACKEND_TAG }}
-          fi
-          echo "BACKEND_TAG=${TAG}" >> $GITHUB_ENV
-
-      - name: Login to Docker Hub
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-        with:
-          driver: ${{ env.BUILDX_DRIVER }}
-          version: ${{ env.BUILDX_VERSION }}
-          endpoint: ${{ env.BUILDX_ENDPOINT }}
-
-      - name: Check out the repo
+      - id: checkout_files
+        name: Checkout Files
         uses: actions/checkout@v4

-      - name: Build and Push Backend to Docker Hub
-        uses: docker/build-push-action@v5.1.0
+      - name: Backend Build and Push
+        uses: ./.github/actions/build-push-ce
         with:
-          context: ./apiserver
-          file: ./apiserver/Dockerfile.api
-          platforms: ${{ env.BUILDX_PLATFORMS }}
-          push: true
-          tags: ${{ env.BACKEND_TAG }}
-        env:
-          DOCKER_BUILDKIT: 1
-          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
-          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+          build-release: ${{ needs.branch_build_setup.outputs.build_release }}
+          build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
+          release-version: ${{ needs.branch_build_setup.outputs.release_version }}
+          docker-username: ${{ secrets.DOCKERHUB_USERNAME }}
+          docker-token: ${{ secrets.DOCKERHUB_TOKEN }}
+          docker-image-owner: makeplane
+          docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_backend }}
+          build-context: ./apiserver
+          dockerfile-path: ./apiserver/Dockerfile.api
+          buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+          buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+          buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+          buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
   branch_build_push_proxy:
-    if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+    if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+    name: Build-Push Proxy Docker Image
     runs-on: ubuntu-20.04
     needs: [branch_build_setup]
-    env:
-      PROXY_TAG: makeplane/plane-proxy:${{ needs.branch_build_setup.outputs.gh_branch_name }}
-      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
-      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
-      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
-      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
-      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
     steps:
-      - name: Set Proxy Docker Tag
-        run: |
-          if [ "${{ github.event_name }}" == "release" ]; then
-            TAG=makeplane/plane-proxy:stable,makeplane/plane-proxy:${{ github.event.release.tag_name }}
-          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
-            TAG=makeplane/plane-proxy:latest
-          else
-            TAG=${{ env.PROXY_TAG }}
-          fi
-          echo "PROXY_TAG=${TAG}" >> $GITHUB_ENV
-
-      - name: Login to Docker Hub
-        uses: docker/login-action@v3
+      - id: checkout_files
+        name: Checkout Files
+        uses: actions/checkout@v4
+      - name: Proxy Build and Push
+        uses: ./.github/actions/build-push-ce
         with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
+          build-release: ${{ needs.branch_build_setup.outputs.build_release }}
+          build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
+          release-version: ${{ needs.branch_build_setup.outputs.release_version }}
+          docker-username: ${{ secrets.DOCKERHUB_USERNAME }}
+          docker-token: ${{ secrets.DOCKERHUB_TOKEN }}
+          docker-image-owner: makeplane
+          docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_proxy }}
+          build-context: ./nginx
+          dockerfile-path: ./nginx/Dockerfile
+          buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+          buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+          buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+          buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}

-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-        with:
-          driver: ${{ env.BUILDX_DRIVER }}
-          version: ${{ env.BUILDX_VERSION }}
-          endpoint: ${{ env.BUILDX_ENDPOINT }}
-
-      - name: Check out the repo
-        uses: actions/checkout@v4
-
-      - name: Build and Push Plane-Proxy to Docker Hub
-        uses: docker/build-push-action@v5.1.0
-        with:
-          context: ./nginx
-          file: ./nginx/Dockerfile
-          platforms: ${{ env.BUILDX_PLATFORMS }}
-          tags: ${{ env.PROXY_TAG }}
-          push: true
-        env:
-          DOCKER_BUILDKIT: 1
-          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
-          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+  attach_assets_to_build:
+    if: ${{ needs.branch_build_setup.outputs.build_type == 'Build' }}
+    name: Attach Assets to Build
+    runs-on: ubuntu-20.04
+    needs: [branch_build_setup]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Update Assets
+        run: |
+          cp ./deploy/selfhost/install.sh deploy/selfhost/setup.sh
+      - name: Attach Assets
+        id: attach_assets
+        uses: actions/upload-artifact@v4
+        with:
+          name: selfhost-assets
+          retention-days: 2
+          path: |
+            ${{ github.workspace }}/deploy/selfhost/setup.sh
+            ${{ github.workspace }}/deploy/selfhost/restore.sh
+            ${{ github.workspace }}/deploy/selfhost/docker-compose.yml
+            ${{ github.workspace }}/deploy/selfhost/variables.env
+
+  publish_release:
+    if: ${{ needs.branch_build_setup.outputs.build_type == 'Release' }}
+    name: Build Release
+    runs-on: ubuntu-20.04
+    needs:
+      [
+        branch_build_setup,
+        branch_build_push_admin,
+        branch_build_push_web,
+        branch_build_push_space,
+        branch_build_push_live,
+        branch_build_push_apiserver,
+        branch_build_push_proxy,
+      ]
+    env:
+      REL_VERSION: ${{ needs.branch_build_setup.outputs.release_version }}
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - name: Update Assets
+        run: |
+          cp ./deploy/selfhost/install.sh deploy/selfhost/setup.sh
+
+      - name: Create Release
+        id: create_release
+        uses: softprops/action-gh-release@v2.1.0
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # This token is provided by Actions, you do not need to create your own token
+        with:
+          tag_name: ${{ env.REL_VERSION }}
+          name: ${{ env.REL_VERSION }}
+          draft: false
+          prerelease: ${{ env.IS_PRERELEASE }}
+          generate_release_notes: true
+          files: |
+            ${{ github.workspace }}/deploy/selfhost/setup.sh
+            ${{ github.workspace }}/deploy/selfhost/restore.sh
+            ${{ github.workspace }}/deploy/selfhost/docker-compose.yml
+            ${{ github.workspace }}/deploy/selfhost/variables.env
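A usage sketch (an assumption, not shown in the diff): with the workflow_dispatch inputs declared above, a release build could be started from the GitHub CLI roughly like this. Workflow name and input names are taken from the YAML; the version value is hypothetical.

    # Hypothetical invocation of the dispatch-only workflow defined above.
    gh workflow run "Branch Build CE" \
      -f build_type=Release \
      -f releaseVersion=v0.23.0 \
      -f isPrerelease=false \
      -f arm64=true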
8 .github/workflows/codeql.yml vendored

@@ -29,11 +29,11 @@ jobs:

     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v2
+        uses: github/codeql-action/init@v3
         with:
           languages: ${{ matrix.language }}
           # If you wish to specify custom queries, you can do so here or in a config file.

@@ -46,7 +46,7 @@ jobs:
       # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
       # If this step fails, then you should remove it and run the build manually (see below)
       - name: Autobuild
-        uses: github/codeql-action/autobuild@v2
+        uses: github/codeql-action/autobuild@v3

       # ℹ️ Command-line programs to run using the OS shell.
       # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun

@@ -59,6 +59,6 @@ jobs:
       #   ./location_of_script_within_repo/buildscript.sh

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v2
+        uses: github/codeql-action/analyze@v3
         with:
           category: "/language:${{matrix.language}}"
2 .github/workflows/feature-deployment.yml vendored

@@ -79,7 +79,7 @@ jobs:
         uses: actions/checkout@v4

       - name: Build and Push to Docker Hub
-        uses: docker/build-push-action@v5.1.0
+        uses: docker/build-push-action@v6.9.0
         with:
           context: .
           file: ./aio/Dockerfile-app
@@ -8,37 +8,20 @@ on:

 env:
   CURRENT_BRANCH: ${{ github.ref_name }}
-  SOURCE_BRANCH: ${{ vars.SYNC_SOURCE_BRANCH_NAME }} # The sync branch such as "sync/ce"
-  TARGET_BRANCH: ${{ vars.SYNC_TARGET_BRANCH_NAME }} # The target branch that you would like to merge changes like develop
+  TARGET_BRANCH: "preview" # The target branch that you would like to merge changes like develop
   GITHUB_TOKEN: ${{ secrets.ACCESS_TOKEN }} # Personal access token required to modify contents and workflows
   REVIEWER: ${{ vars.SYNC_PR_REVIEWER }}
   ACCOUNT_USER_NAME: ${{ vars.ACCOUNT_USER_NAME }}
   ACCOUNT_USER_EMAIL: ${{ vars.ACCOUNT_USER_EMAIL }}

 jobs:
-  Check_Branch:
-    runs-on: ubuntu-latest
-    outputs:
-      BRANCH_MATCH: ${{ steps.check-branch.outputs.MATCH }}
-    steps:
-      - name: Check if current branch matches the secret
-        id: check-branch
-        run: |
-          if [ "$CURRENT_BRANCH" = "$SOURCE_BRANCH" ]; then
-            echo "MATCH=true" >> $GITHUB_OUTPUT
-          else
-            echo "MATCH=false" >> $GITHUB_OUTPUT
-          fi
-  Create_PR:
-    if: ${{ needs.Check_Branch.outputs.BRANCH_MATCH == 'true' }}
-    needs: [Check_Branch]
+  create_pull_request:
     runs-on: ubuntu-latest
     permissions:
       pull-requests: write
       contents: write
     steps:
       - name: Checkout code
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4
         with:
           fetch-depth: 0 # Fetch all history for all branches and tags

@@ -59,11 +42,11 @@ jobs:
       - name: Create PR to Target Branch
         run: |
           # get all pull requests and check if there is already a PR
-          PR_EXISTS=$(gh pr list --base $TARGET_BRANCH --head $SOURCE_BRANCH --state open --json number | jq '.[] | .number')
+          PR_EXISTS=$(gh pr list --base $TARGET_BRANCH --head $CURRENT_BRANCH --state open --json number | jq '.[] | .number')
           if [ -n "$PR_EXISTS" ]; then
             echo "Pull Request already exists: $PR_EXISTS"
           else
             echo "Creating new pull request"
-            PR_URL=$(gh pr create --base $TARGET_BRANCH --head $SOURCE_BRANCH --title "sync: community changes" --body "")
+            PR_URL=$(gh pr create --base $TARGET_BRANCH --head $CURRENT_BRANCH --title "${{ vars.SYNC_PR_TITLE }}" --body "")
             echo "Pull Request created: $PR_URL"
           fi
@@ -17,7 +17,7 @@ jobs:
       contents: read
     steps:
       - name: Checkout Code
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4
         with:
           persist-credentials: false
           fetch-depth: 0
16 .idx/dev.nix Normal file

@@ -0,0 +1,16 @@
+{ pkgs, ... }: {
+
+  # Which nixpkgs channel to use.
+  channel = "stable-23.11"; # or "unstable"
+
+  # Use https://search.nixos.org/packages to find packages
+  packages = [
+    pkgs.nodejs_20
+    pkgs.python3
+  ];
+
+  services.docker.enable = true;
+  services.postgres.enable = true;
+  services.redis.enable = true;
+
+}
@@ -1,3 +0,0 @@
-{
-  "*.{ts,tsx,js,jsx}": ["eslint -c ./.eslintrc-staged.js", "prettier --check"]
-}
@@ -4,7 +4,7 @@ Thank you for showing an interest in contributing to Plane! All kinds of contrib

 ## Submitting an issue

-Before submitting a new issue, please search the [issues](https://github.com/makeplane/plane/issues) tab. Maybe an issue or discussion already exists and might inform you of workarounds. Otherwise, you can give new informplaneation.
+Before submitting a new issue, please search the [issues](https://github.com/makeplane/plane/issues) tab. Maybe an issue or discussion already exists and might inform you of workarounds. Otherwise, you can give new information.

 While we want to fix all the [issues](https://github.com/makeplane/plane/issues), before fixing a bug we need to be able to reproduce and confirm it. Please provide us with a minimal reproduction scenario using a repository or [Gist](https://gist.github.com/). Having a live, reproducible scenario gives us the information without asking questions back & forth with additional questions like:
65 SECURITY.md

@@ -1,44 +1,39 @@
-# Security Policy
+# Security policy

-This document outlines security procedures and vulnerabilities reporting for the Plane project.
+This document outlines the security protocols and vulnerability reporting guidelines for the Plane project. Ensuring the security of our systems is a top priority, and while we work diligently to maintain robust protection, vulnerabilities may still occur. We highly value the community’s role in identifying and reporting security concerns to uphold the integrity of our systems and safeguard our users.

-At Plane, we safeguarding the security of our systems with top priority. Despite our efforts, vulnerabilities may still exist. We greatly appreciate your assistance in identifying and reporting any such vulnerabilities to help us maintain the integrity of our systems and protect our clients.
+## Reporting a vulnerability

-To report a security vulnerability, please email us directly at security@plane.so with a detailed description of the vulnerability and steps to reproduce it. Please refrain from disclosing the vulnerability publicly until we have had an opportunity to review and address it.
+If you have identified a security vulnerability, submit your findings to [security@plane.so](mailto:security@plane.so).
+Ensure your report includes all relevant information needed for us to reproduce and assess the issue. Include the IP address or URL of the affected system.
+
+To ensure a responsible and effective disclosure process, please adhere to the following:
+
+- Maintain confidentiality and refrain from publicly disclosing the vulnerability until we have had the opportunity to investigate and address the issue.
+- Refrain from running automated vulnerability scans on our infrastructure or dashboard without prior consent. Contact us to set up a sandbox environment if necessary.
+- Do not exploit any discovered vulnerabilities for malicious purposes, such as accessing or altering user data.
+- Do not engage in physical security attacks, social engineering, distributed denial of service (DDoS) attacks, spam campaigns, or attacks on third-party applications as part of your vulnerability testing.

-## Out of Scope Vulnerabilities
+## Out of scope

-We appreciate your help in identifying vulnerabilities. However, please note that the following types of vulnerabilities are considered out of scope:
+While we appreciate all efforts to assist in improving our security, please note that the following types of vulnerabilities are considered out of scope:

-- Attacks requiring MITM or physical access to a user's device.
-- Content spoofing and text injection issues without demonstrating an attack vector or ability to modify HTML/CSS.
-- Email spoofing.
-- Missing DNSSEC, CAA, CSP headers.
-- Lack of Secure or HTTP only flag on non-sensitive cookies.
+- Vulnerabilities requiring man-in-the-middle (MITM) attacks or physical access to a user’s device.
+- Content spoofing or text injection issues without a clear attack vector or the ability to modify HTML/CSS.
+- Issues related to email spoofing.
+- Missing DNSSEC, CAA, or CSP headers.
+- Absence of secure or HTTP-only flags on non-sensitive cookies.

-## Reporting Process
+## Our commitment

-If you discover a vulnerability, please adhere to the following reporting process:
+At Plane, we are committed to maintaining transparent and collaborative communication throughout the vulnerability resolution process. Here's what you can expect from us:

-1. Email your findings to security@plane.so.
-2. Refrain from running automated scanners on our infrastructure or dashboard without prior consent. Contact us to set up a sandbox environment if necessary.
-3. Do not exploit the vulnerability for malicious purposes, such as downloading excessive data or altering user data.
-4. Maintain confidentiality and refrain from disclosing the vulnerability until it has been resolved.
-5. Avoid using physical security attacks, social engineering, distributed denial of service, spam, or third-party applications.
-
-When reporting a vulnerability, please provide sufficient information to allow us to reproduce and address the issue promptly. Include the IP address or URL of the affected system, along with a detailed description of the vulnerability.
-
-## Our Commitment
-
-We are committed to promptly addressing reported vulnerabilities and maintaining open communication throughout the resolution process. Here's what you can expect from us:
-
-- **Response Time:** We will acknowledge receipt of your report within three business days and provide an expected resolution date.
-- **Legal Protection:** We will not pursue legal action against you for reporting vulnerabilities, provided you adhere to the reporting guidelines.
-- **Confidentiality:** Your report will be treated with strict confidentiality. We will not disclose your personal information to third parties without your consent.
-- **Progress Updates:** We will keep you informed of our progress in resolving the reported vulnerability.
-- **Recognition:** With your permission, we will publicly acknowledge you as the discoverer of the vulnerability.
-- **Timely Resolution:** We strive to resolve all reported vulnerabilities promptly and will actively participate in the publication process once the issue is resolved.
-
-We appreciate your cooperation in helping us maintain the security of our systems and protecting our clients. Thank you for your contributions to our security efforts.
-
-reference: https://supabase.com/.well-known/security.txt
+- **Response Time** <br/>
+  We will acknowledge receipt of your vulnerability report within three business days and provide an estimated timeline for resolution.
+- **Legal Protection** <br/>
+  We will not initiate legal action against you for reporting vulnerabilities, provided you adhere to the reporting guidelines.
+- **Confidentiality** <br/>
+  Your report will be treated with confidentiality. We will not disclose your personal information to third parties without your consent.
+- **Recognition** <br/>
+  With your permission, we are happy to publicly acknowledge your contribution to improving our security once the issue is resolved.
+- **Timely Resolution** <br/>
+  We are committed to working closely with you throughout the resolution process, providing timely updates as necessary. Our goal is to address all reported vulnerabilities swiftly, and we will actively engage with you to coordinate a responsible disclosure once the issue is fully resolved.
+
+We appreciate your help in ensuring the security of our platform. Your contributions are crucial to protecting our users and maintaining a secure environment. Thank you for working with us to keep Plane safe.
```diff
@@ -1,52 +1,8 @@
module.exports = {
root: true,
extends: ["custom"],
extends: ["@plane/eslint-config/next.js"],
parser: "@typescript-eslint/parser",
settings: {
"import/resolver": {
typescript: {},
node: {
moduleDirectory: ["node_modules", "."],
},
},
parserOptions: {
project: true,
},
rules: {
"import/order": [
"error",
{
groups: ["builtin", "external", "internal", "parent", "sibling",],
pathGroups: [
{
pattern: "react",
group: "external",
position: "before",
},
{
pattern: "lucide-react",
group: "external",
position: "after",
},
{
pattern: "@headlessui/**",
group: "external",
position: "after",
},
{
pattern: "@plane/**",
group: "external",
position: "after",
},
{
pattern: "@/**",
group: "internal",
}
],
pathGroupsExcludedImportTypes: ["builtin", "internal", "react"],
alphabetize: {
order: "asc",
caseInsensitive: true,
},
},
],
},
}
};
```
```diff
@@ -10,8 +10,9 @@ import {
// components
import { AuthenticationMethodCard } from "@/components/authentication";
// helpers
import { UpgradeButton } from "@/components/common/upgrade-button";
import { getBaseAuthenticationModes } from "@/helpers/authentication.helper";
// plane admin components
import { UpgradeButton } from "@/plane-admin/components/common";
// images
import OIDCLogo from "@/public/logos/oidc-logo.svg";
import SAMLLogo from "@/public/logos/saml-logo.svg";
@@ -27,24 +28,24 @@ export const getAuthenticationModes: (props: TGetBaseAuthenticationModeProps) =>
updateConfig,
resolvedTheme,
}) => [
...getBaseAuthenticationModes({ disabled, updateConfig, resolvedTheme }),
{
key: "oidc",
name: "OIDC",
description: "Authenticate your users via the OpenID Connect protocol.",
icon: <Image src={OIDCLogo} height={22} width={22} alt="OIDC Logo" />,
config: <UpgradeButton />,
unavailable: true,
},
{
key: "saml",
name: "SAML",
description: "Authenticate your users via the Security Assertion Markup Language protocol.",
icon: <Image src={SAMLLogo} height={22} width={22} alt="SAML Logo" className="pl-0.5" />,
config: <UpgradeButton />,
unavailable: true,
},
];

export const AuthenticationModes: React.FC<TAuthenticationModeProps> = observer((props) => {
const { disabled, updateConfig } = props;
```
admin/ce/components/common/index.ts (new file):

```diff
@@ -0,0 +1 @@
export * from "./upgrade-button";
```

admin/ce/store/root.store.ts (new file):

```diff
@@ -0,0 +1,19 @@
import { enableStaticRendering } from "mobx-react";
// stores
import { CoreRootStore } from "@/store/root.store";

enableStaticRendering(typeof window === "undefined");

export class RootStore extends CoreRootStore {
  constructor() {
    super();
  }

  hydrate(initialData: any) {
    super.hydrate(initialData);
  }

  resetOnSignOut() {
    super.resetOnSignOut();
  }
}
```
```diff
@@ -2,15 +2,14 @@

import { FC, useEffect, useRef } from "react";
import { observer } from "mobx-react";
// hooks
import { HelpSection, SidebarMenu, SidebarDropdown } from "@/components/admin-sidebar";
import { useTheme } from "@/hooks/store";
import useOutsideClickDetector from "@/hooks/use-outside-click-detector";
// plane helpers
import { useOutsideClickDetector } from "@plane/helpers";
// components
import { HelpSection, SidebarMenu, SidebarDropdown } from "@/components/admin-sidebar";
// hooks
import { useTheme } from "@/hooks/store";

export interface IInstanceSidebar {}

export const InstanceSidebar: FC<IInstanceSidebar> = observer(() => {
export const InstanceSidebar: FC = observer(() => {
// store
const { isSidebarCollapsed, toggleSidebar } = useTheme();
```

```diff
@@ -5,11 +5,13 @@ import { observer } from "mobx-react";
import { useTheme as useNextTheme } from "next-themes";
import { LogOut, UserCog2, Palette } from "lucide-react";
import { Menu, Transition } from "@headlessui/react";
// plane ui
import { Avatar } from "@plane/ui";
// hooks
import { API_BASE_URL, cn } from "@/helpers/common.helper";
import { useTheme, useUser } from "@/hooks/store";
// helpers
import { API_BASE_URL, cn } from "@/helpers/common.helper";
import { getFileURL } from "@/helpers/file.helper";
// hooks
import { useTheme, useUser } from "@/hooks/store";
// services
import { AuthService } from "@/services/auth.service";
@@ -122,7 +124,7 @@ export const SidebarDropdown = observer(() => {
<Menu.Button className="grid place-items-center outline-none">
<Avatar
name={currentUser.display_name}
src={currentUser.avatar ?? undefined}
src={getFileURL(currentUser.avatar_url)}
size={24}
shape="square"
className="!text-base"
```
admin/core/components/authentication/auth-banner.tsx (new file):

```diff
@@ -0,0 +1,29 @@
import { FC } from "react";
import { Info, X } from "lucide-react";
// helpers
import { TAuthErrorInfo } from "@/helpers/authentication.helper";

type TAuthBanner = {
  bannerData: TAuthErrorInfo | undefined;
  handleBannerData?: (bannerData: TAuthErrorInfo | undefined) => void;
};

export const AuthBanner: FC<TAuthBanner> = (props) => {
  const { bannerData, handleBannerData } = props;

  if (!bannerData) return <></>;
  return (
    <div className="relative flex items-center p-2 rounded-md gap-2 border border-custom-primary-100/50 bg-custom-primary-100/10">
      <div className="w-4 h-4 flex-shrink-0 relative flex justify-center items-center">
        <Info size={16} className="text-custom-primary-100" />
      </div>
      <div className="w-full text-sm font-medium text-custom-primary-100">{bannerData?.message}</div>
      <div
        className="relative ml-auto w-6 h-6 rounded-sm flex justify-center items-center transition-all cursor-pointer hover:bg-custom-primary-100/20 text-custom-primary-100/80"
        onClick={() => handleBannerData && handleBannerData(undefined)}
      >
        <X className="w-4 h-4 flex-shrink-0" />
      </div>
    </div>
  );
};
```
```diff
@@ -1,3 +1,4 @@
export * from "./auth-banner";
export * from "./email-config-switch";
export * from "./password-config-switch";
export * from "./authentication-method-card";
```

```diff
@@ -8,4 +8,3 @@ export * from "./empty-state";
export * from "./logo-spinner";
export * from "./page-header";
export * from "./code-block";
export * from "./upgrade-button";
```

```diff
@@ -7,11 +7,7 @@ import { Button } from "@plane/ui";
import InstanceFailureDarkImage from "@/public/instance/instance-failure-dark.svg";
import InstanceFailureImage from "@/public/instance/instance-failure.svg";

type InstanceFailureViewProps = {
// mutate: () => void;
};

export const InstanceFailureView: FC<InstanceFailureViewProps> = () => {
export const InstanceFailureView: FC = () => {
const { resolvedTheme } = useTheme();

const instanceImage = resolvedTheme === "dark" ? InstanceFailureDarkImage : InstanceFailureImage;
```
```diff
@@ -8,8 +8,16 @@ import { Button, Input, Spinner } from "@plane/ui";
// components
import { Banner } from "@/components/common";
// helpers
import {
authErrorHandler,
EAuthenticationErrorCodes,
EErrorAlertType,
TAuthErrorInfo,
} from "@/helpers/authentication.helper";

import { API_BASE_URL } from "@/helpers/common.helper";
import { AuthService } from "@/services/auth.service";
import { AuthBanner } from "../authentication";
// ui
// icons

@@ -53,6 +61,7 @@ export const InstanceSignInForm: FC = (props) => {
const [csrfToken, setCsrfToken] = useState<string | undefined>(undefined);
const [formData, setFormData] = useState<TFormData>(defaultFromData);
const [isSubmitting, setIsSubmitting] = useState(false);
const [errorInfo, setErrorInfo] = useState<TAuthErrorInfo | undefined>(undefined);

const handleFormChange = (key: keyof TFormData, value: string | boolean) =>
setFormData((prev) => ({ ...prev, [key]: value }));
@@ -91,6 +100,15 @@ export const InstanceSignInForm: FC = (props) => {
[formData.email, formData.password, isSubmitting]
);

useEffect(() => {
if (errorCode) {
const errorDetail = authErrorHandler(errorCode?.toString() as EAuthenticationErrorCodes);
if (errorDetail) {
setErrorInfo(errorDetail);
}
}
}, [errorCode]);

return (
<div className="flex-grow container mx-auto max-w-lg px-10 lg:max-w-md lg:px-5 py-10 lg:pt-28 transition-all">
<div className="relative flex flex-col space-y-6">
@@ -103,7 +121,11 @@ export const InstanceSignInForm: FC = (props) => {
</p>
</div>

{errorData.type && errorData?.message && <Banner type="error" message={errorData?.message} />}
{errorData.type && errorData?.message ? (
<Banner type="error" message={errorData?.message} />
) : (
<>{errorInfo && <AuthBanner bannerData={errorInfo} handleBannerData={(value) => setErrorInfo(value)} />}</>
)}

<form
className="space-y-4"
```
```diff
@@ -1,21 +0,0 @@
"use client";

import React, { useEffect } from "react";

const useOutsideClickDetector = (ref: React.RefObject<HTMLElement>, callback: () => void) => {
  const handleClick = (event: MouseEvent) => {
    if (ref.current && !ref.current.contains(event.target as Node)) {
      callback();
    }
  };

  useEffect(() => {
    document.addEventListener("mousedown", handleClick);

    return () => {
      document.removeEventListener("mousedown", handleClick);
    };
  });
};

export default useOutsideClickDetector;
```
```diff
@@ -18,6 +18,7 @@ export const AdminLayout: FC<TAdminLayout> = observer((props) => {
const { children } = props;
// router
const router = useRouter();
// store hooks
const { isUserLoggedIn } = useUser();

useEffect(() => {
```

```diff
@@ -1,8 +1,8 @@
"use client";

import { ReactNode, createContext } from "react";
// store
import { RootStore } from "@/store/root.store";
// plane admin store
import { RootStore } from "@/plane-admin/store/root.store";

let rootStore = new RootStore();
```

```diff
@@ -1,5 +1,5 @@
// helpers
import { API_BASE_URL } from "helpers/common.helper";
import { API_BASE_URL } from "@/helpers/common.helper";
// services
import { APIService } from "@/services/api.service";
```

```diff
@@ -1,7 +1,7 @@
// helpers
import { API_BASE_URL } from "helpers/common.helper";
// types
import type { IUser } from "@plane/types";
// helpers
import { API_BASE_URL } from "@/helpers/common.helper";
// services
import { APIService } from "@/services/api.service";
```

```diff
@@ -13,7 +13,7 @@ import { EInstanceStatus, TInstanceStatus } from "@/helpers/instance.helper";
// services
import { InstanceService } from "@/services/instance.service";
// root store
import { RootStore } from "@/store/root.store";
import { CoreRootStore } from "@/store/root.store";

export interface IInstanceStore {
// issues
@@ -46,7 +46,7 @@ export class InstanceStore implements IInstanceStore {
// service
instanceService;

constructor(private store: RootStore) {
constructor(private store: CoreRootStore) {
makeObservable(this, {
// observable
isLoading: observable.ref,
```

```diff
@@ -6,7 +6,7 @@ import { IUserStore, UserStore } from "./user.store";

enableStaticRendering(typeof window === "undefined");

export class RootStore {
export abstract class CoreRootStore {
theme: IThemeStore;
instance: IInstanceStore;
user: IUserStore;
```

```diff
@@ -1,6 +1,6 @@
import { action, observable, makeObservable } from "mobx";
// root store
import { RootStore } from "@/store/root.store";
import { CoreRootStore } from "@/store/root.store";

type TTheme = "dark" | "light";
export interface IThemeStore {
@@ -21,7 +21,7 @@ export class ThemeStore implements IThemeStore {
isSidebarCollapsed: boolean | undefined = undefined;
theme: string | undefined = undefined;

constructor(private store: RootStore) {
constructor(private store: CoreRootStore) {
makeObservable(this, {
// observables
isNewUserPopup: observable.ref,
```

```diff
@@ -6,7 +6,7 @@ import { EUserStatus, TUserStatus } from "@/helpers/user.helper";
import { AuthService } from "@/services/auth.service";
import { UserService } from "@/services/user.service";
// root store
import { RootStore } from "@/store/root.store";
import { CoreRootStore } from "@/store/root.store";

export interface IUserStore {
// observables
@@ -31,7 +31,7 @@ export class UserStore implements IUserStore {
userService;
authService;

constructor(private store: RootStore) {
constructor(private store: CoreRootStore) {
makeObservable(this, {
// observables
isLoading: observable.ref,
```
admin/ee/components/common/index.ts (new file):

```diff
@@ -0,0 +1 @@
export * from "ce/components/common";
```

admin/ee/store/root.store.ts (new file):

```diff
@@ -0,0 +1 @@
export * from "ce/store/root.store";
```

admin/helpers/file.helper.ts (new file):

```diff
@@ -0,0 +1,14 @@
// helpers
import { API_BASE_URL } from "@/helpers/common.helper";

/**
 * @description combine the file path with the base URL
 * @param {string} path
 * @returns {string} final URL with the base URL
 */
export const getFileURL = (path: string): string | undefined => {
  if (!path) return undefined;
  const isValidURL = path.startsWith("http");
  if (isValidURL) return path;
  return `${API_BASE_URL}${path}`;
};
```
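The helper above passes absolute URLs through untouched and prefixes relative paths with the API base. A minimal Python transcription of the same branching, useful as a sanity check; the base URL here is a hypothetical placeholder, not a value from the diff:

```python
# Python transcription of admin/helpers/file.helper.ts above.
# API_BASE_URL is a hypothetical placeholder.
API_BASE_URL = "https://api.example.test"


def get_file_url(path: str) -> str | None:
    if not path:
        return None
    if path.startswith("http"):  # already absolute, leave untouched
        return path
    return f"{API_BASE_URL}{path}"


assert get_file_url("/assets/avatar.png") == "https://api.example.test/assets/avatar.png"
assert get_file_url("https://cdn.example.test/a.png") == "https://cdn.example.test/a.png"
assert get_file_url("") is None
```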
admin/helpers/string.helper.ts (new file):

```diff
@@ -0,0 +1,21 @@
/**
 * @description
 * This function tests whether a URL is valid or not.
 *
 * It accepts URLs with or without the protocol.
 * @param {string} url
 * @returns {boolean}
 * @example
 * checkURLValidity("https://example.com") => true
 * checkURLValidity("example.com") => true
 * checkURLValidity("example") => false
 */
export const checkURLValidity = (url: string): boolean => {
  if (!url) return false;

  // regex to support complex query parameters and fragments
  const urlPattern =
    /^(https?:\/\/)?((([a-z\d-]+\.)*[a-z\d-]+\.[a-z]{2,6})|(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}))(:\d+)?(\/[\w.-]*)*(\?[^#\s]*)?(#[\w-]*)?$/i;

  return urlPattern.test(url);
};
```
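The pattern accepts a bare domain or IPv4 address, an optional scheme, and optional port, path, query, and fragment. A Python transcription of the same regex that reproduces the three examples from the JSDoc:

```python
# Python transcription of checkURLValidity from admin/helpers/string.helper.ts above.
import re

URL_PATTERN = re.compile(
    r"^(https?:\/\/)?"                               # optional protocol
    r"((([a-z\d-]+\.)*[a-z\d-]+\.[a-z]{2,6})"        # domain name...
    r"|(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}))"        # ...or IPv4 address
    r"(:\d+)?(\/[\w.-]*)*(\?[^#\s]*)?(#[\w-]*)?$",   # port, path, query, fragment
    re.IGNORECASE,
)


def check_url_validity(url: str) -> bool:
    return bool(url) and bool(URL_PATTERN.match(url))


assert check_url_validity("https://example.com") is True
assert check_url_validity("example.com") is True
assert check_url_validity("example") is False
```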
admin/next-env.d.ts (vendored):

```diff
@@ -2,4 +2,4 @@
/// <reference types="next/image-types/global" />

// NOTE: This file should not be edited
// see https://nextjs.org/docs/basic-features/typescript for more information.
// see https://nextjs.org/docs/app/building-your-application/configuring/typescript for more information.
```
```diff
@@ -1,6 +1,6 @@
{
"name": "admin",
"version": "0.22.0",
"version": "0.23.1",
"private": true,
"scripts": {
"dev": "turbo run develop",
@@ -8,43 +8,44 @@
"build": "next build",
"preview": "next build && next start",
"start": "next start",
"lint": "next lint"
"lint": "eslint . --ext .ts,.tsx",
"lint:errors": "eslint . --ext .ts,.tsx --quiet"
},
"dependencies": {
"@headlessui/react": "^1.7.19",
"@plane/constants": "*",
"@plane/helpers": "*",
"@plane/types": "*",
"@plane/ui": "*",
"@plane/constants": "*",
"@sentry/nextjs": "^8.32.0",
"@tailwindcss/typography": "^0.5.9",
"@types/lodash": "^4.17.0",
"autoprefixer": "10.4.14",
"axios": "^1.7.4",
"js-cookie": "^3.0.5",
"lodash": "^4.17.21",
"lucide-react": "^0.356.0",
"mobx": "^6.12.0",
"mobx-react": "^9.1.1",
"next": "^14.2.3",
"next": "^14.2.12",
"next-themes": "^0.2.1",
"postcss": "^8.4.38",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"react-hook-form": "^7.51.0",
"react-hook-form": "7.51.5",
"swr": "^2.2.4",
"tailwindcss": "3.3.2",
"uuid": "^9.0.1",
"zxcvbn": "^4.4.2"
},
"devDependencies": {
"@types/js-cookie": "^3.0.6",
"@plane/eslint-config": "*",
"@plane/typescript-config": "*",
"@types/node": "18.16.1",
"@types/react": "^18.2.48",
"@types/react": "^18.3.11",
"@types/react-dom": "^18.2.18",
"@types/uuid": "^9.0.8",
"@types/zxcvbn": "^4.4.4",
"eslint-config-custom": "*",
"tailwind-config-custom": "*",
"tsconfig": "*",
"typescript": "^5.4.2"
"typescript": "5.3.3"
}
}
}
```
```diff
@@ -1,21 +1,15 @@
{
"extends": "tsconfig/nextjs.json",
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
"exclude": ["node_modules"],
"extends": "@plane/typescript-config/nextjs.json",
"compilerOptions": {
"plugins": [{ "name": "next" }],
"baseUrl": ".",
"jsx": "preserve",
"esModuleInterop": true,
"paths": {
"@/*": ["core/*"],
"@/helpers/*": ["helpers/*"],
"@/public/*": ["public/*"],
"@/plane-admin/*": ["ce/*"]
},
"plugins": [
{
"name": "next"
}
]
}
}
},
"include": ["next-env.d.ts", "next.config.js", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
"exclude": ["node_modules"]
}
```
```diff
@@ -15,12 +15,18 @@ POSTGRES_DB="plane"
POSTGRES_PORT=5432
DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}

# Redis Settings
REDIS_HOST="plane-redis"
REDIS_PORT="6379"
REDIS_URL="redis://${REDIS_HOST}:6379/"

# RabbitMQ Settings
RABBITMQ_HOST="plane-mq"
RABBITMQ_PORT="5672"
RABBITMQ_USER="plane"
RABBITMQ_PASSWORD="plane"
RABBITMQ_VHOST="plane"

# AWS Settings
AWS_REGION=""
AWS_ACCESS_KEY_ID="access-key"
@@ -51,5 +57,6 @@ ADMIN_BASE_URL=
SPACE_BASE_URL=
APP_BASE_URL=

# Hard delete files after days
HARD_DELETE_AFTER_DAYS=
HARD_DELETE_AFTER_DAYS=60
```
```diff
@@ -1,29 +1,30 @@
FROM python:3.11.1-alpine3.17 AS backend
FROM python:3.12.5-alpine AS backend

# set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
ENV INSTANCE_CHANGELOG_URL https://api.plane.so/api/public/anchor/8e1c2e4c7bc5493eb7731be3862f6960/pages/

WORKDIR /code

RUN apk --no-cache add \
"libpq~=15" \
"libxslt~=1.1" \
"nodejs-current~=19" \
"xmlsec~=1.2"
RUN apk add --no-cache \
"libpq" \
"libxslt" \
"nodejs-current" \
"xmlsec"

COPY requirements.txt ./
COPY requirements ./requirements
RUN apk add --no-cache libffi-dev
RUN apk add --no-cache --virtual .build-deps \
"bash~=5.2" \
"g++~=12.2" \
"gcc~=12.2" \
"cargo~=1.64" \
"git~=2" \
"make~=4.3" \
"postgresql13-dev~=13" \
"g++" \
"gcc" \
"cargo" \
"git" \
"make" \
"postgresql-dev" \
"libc-dev" \
"linux-headers" \
&& \
```

```diff
@@ -1,24 +1,25 @@
FROM python:3.11.1-alpine3.17 AS backend
FROM python:3.12.5-alpine AS backend

# set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
ENV INSTANCE_CHANGELOG_URL https://api.plane.so/api/public/anchor/8e1c2e4c7bc5493eb7731be3862f6960/pages/

RUN apk --no-cache add \
"bash~=5.2" \
"libpq~=15" \
"libxslt~=1.1" \
"nodejs-current~=19" \
"xmlsec~=1.2" \
"libpq" \
"libxslt" \
"nodejs-current" \
"xmlsec" \
"libffi-dev" \
"bash~=5.2" \
"g++~=12.2" \
"gcc~=12.2" \
"cargo~=1.64" \
"git~=2" \
"make~=4.3" \
"postgresql13-dev~=13" \
"g++" \
"gcc" \
"cargo" \
"git" \
"make" \
"postgresql-dev" \
"libc-dev" \
"linux-headers"
```

```diff
@@ -32,4 +32,3 @@ python manage.py create_bucket
python manage.py clear_cache

python manage.py runserver 0.0.0.0:8000 --settings=plane.settings.local
```
```diff
@@ -1,4 +1,4 @@
{
"name": "plane-api",
"version": "0.22.0"
"version": "0.23.1"
}
```
```diff
@@ -40,3 +40,44 @@ class ApiKeyRateThrottle(SimpleRateThrottle):
            request.META["X-RateLimit-Reset"] = reset_time

        return allowed


class ServiceTokenRateThrottle(SimpleRateThrottle):
    scope = "service_token"
    rate = "300/minute"

    def get_cache_key(self, request, view):
        # Retrieve the API key from the request header
        api_key = request.headers.get("X-Api-Key")
        if not api_key:
            return None  # Allow the request if there's no API key

        # Use the API key as part of the cache key
        return f"{self.scope}:{api_key}"

    def allow_request(self, request, view):
        allowed = super().allow_request(request, view)

        if allowed:
            now = self.timer()
            # Calculate the remaining limit and reset time
            history = self.cache.get(self.key, [])

            # Remove old histories
            while history and history[-1] <= now - self.duration:
                history.pop()

            # Calculate the requests
            num_requests = len(history)

            # Check available requests
            available = self.num_requests - num_requests

            # Unix timestamp for when the rate limit will reset
            reset_time = int(now + self.duration)

            # Add headers
            request.META["X-RateLimit-Remaining"] = max(0, available)
            request.META["X-RateLimit-Reset"] = reset_time

        return allowed
```
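For context, DRF's `SimpleRateThrottle` derives its cache key from `get_cache_key`, and returning `None` from that method disables throttling for the request. A minimal sketch of the same pattern as the `ServiceTokenRateThrottle` added above; class name, scope, rate, and header follow the diff, and the cache backend is whatever DRF's throttle settings point at:

```python
# Minimal sketch of a header-keyed DRF throttle, mirroring ServiceTokenRateThrottle above.
from rest_framework.throttling import SimpleRateThrottle


class ServiceTokenThrottleSketch(SimpleRateThrottle):
    scope = "service_token"
    rate = "300/minute"  # hard-coded, as in the diff

    def get_cache_key(self, request, view):
        api_key = request.headers.get("X-Api-Key")
        if not api_key:
            return None  # None tells DRF to skip throttling entirely
        # One request-history bucket per API key
        return f"{self.scope}:{api_key}"
```

The `allow_request` override in the diff only adds bookkeeping on top of this: it recomputes the remaining quota from `self.cache.get(self.key, [])` and stashes `X-RateLimit-Remaining` / `X-RateLimit-Reset` on `request.META`, presumably so they can be copied onto the response later.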
```diff
@@ -5,11 +5,11 @@ from .issue import (
IssueSerializer,
LabelSerializer,
IssueLinkSerializer,
IssueAttachmentSerializer,
IssueCommentSerializer,
IssueAttachmentSerializer,
IssueActivitySerializer,
IssueExpandSerializer,
IssueLiteSerializer,
)
from .state import StateLiteSerializer, StateSerializer
from .cycle import CycleSerializer, CycleIssueSerializer, CycleLiteSerializer
@@ -18,4 +18,4 @@ from .module import (
ModuleIssueSerializer,
ModuleLiteSerializer,
)
from .inbox import InboxIssueSerializer
from .intake import IntakeIssueSerializer
```

```diff
@@ -67,6 +67,7 @@ class BaseSerializer(serializers.ModelSerializer):
# Import all the expandable serializers
from . import (
IssueSerializer,
IssueLiteSerializer,
ProjectLiteSerializer,
StateLiteSerializer,
UserLiteSerializer,
@@ -86,6 +87,7 @@ class BaseSerializer(serializers.ModelSerializer):
"actor": UserLiteSerializer,
"owned_by": UserLiteSerializer,
"members": UserLiteSerializer,
"parent": IssueLiteSerializer,
}
# Check if field in expansion then expand the field
if expand in expansion:
```
```diff
@@ -1,15 +1,17 @@
# Module imports
from .base import BaseSerializer
from .issue import IssueExpandSerializer
from plane.db.models import InboxIssue
from plane.db.models import IntakeIssue
from rest_framework import serializers


class InboxIssueSerializer(BaseSerializer):
class IntakeIssueSerializer(BaseSerializer):

    issue_detail = IssueExpandSerializer(read_only=True, source="issue")
    inbox = serializers.UUIDField(source="intake.id", read_only=True)

    class Meta:
        model = InboxIssue
        model = IntakeIssue
        fields = "__all__"
        read_only_fields = [
            "id",
```
```diff
@@ -1,6 +1,3 @@
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator

# Django imports
from django.utils import timezone
from lxml import html
@@ -11,9 +8,10 @@ from rest_framework import serializers
# Module imports
from plane.db.models import (
Issue,
IssueType,
IssueActivity,
IssueAssignee,
IssueAttachment,
FileAsset,
IssueComment,
IssueLabel,
IssueLink,
@@ -29,6 +27,10 @@ from .module import ModuleLiteSerializer, ModuleSerializer
from .state import StateLiteSerializer
from .user import UserLiteSerializer

# Django imports
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator


class IssueSerializer(BaseSerializer):
assignees = serializers.ListField(
@@ -46,6 +48,12 @@ class IssueSerializer(BaseSerializer):
write_only=True,
required=False,
)
type_id = serializers.PrimaryKeyRelatedField(
source="type",
queryset=IssueType.objects.all(),
required=False,
allow_null=True,
)

class Meta:
model = Issue
@@ -129,9 +137,19 @@ class IssueSerializer(BaseSerializer):
workspace_id = self.context["workspace_id"]
default_assignee_id = self.context["default_assignee_id"]

issue_type = validated_data.pop("type", None)

if not issue_type:
# Get default issue type
issue_type = IssueType.objects.filter(
project_issue_types__project_id=project_id, is_default=True
).first()
issue_type = issue_type

issue = Issue.objects.create(
**validated_data,
project_id=project_id,
type=issue_type,
)

# Issue Audit Users
```
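The interesting part of the `create` hunk above is the fallback: when the client omits `type_id`, the serializer resolves the project's default `IssueType` before creating the issue. A hedged sketch of that step in isolation; the model names, the `project_issue_types` related name, and the `is_default` flag come from the diff, while `project_id` and `validated_data` are supplied by the serializer context:

```python
# Sketch of the default-type fallback in IssueSerializer.create above.
from plane.db.models import Issue, IssueType  # imports as in the diff

issue_type = validated_data.pop("type", None)
if not issue_type:
    # Fall back to the project's default issue type when none was sent
    issue_type = IssueType.objects.filter(
        project_issue_types__project_id=project_id, is_default=True
    ).first()  # None when the project defines no default type

issue = Issue.objects.create(**validated_data, project_id=project_id, type=issue_type)
```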
```diff
@@ -257,6 +275,17 @@ class IssueSerializer(BaseSerializer):
return data


class IssueLiteSerializer(BaseSerializer):
class Meta:
model = Issue
fields = [
"id",
"sequence_id",
"project_id",
]
read_only_fields = fields


class LabelSerializer(BaseSerializer):
class Meta:
model = Label
@@ -331,7 +360,7 @@ class IssueLinkSerializer(BaseSerializer):

class IssueAttachmentSerializer(BaseSerializer):
class Meta:
model = IssueAttachment
model = FileAsset
fields = "__all__"
read_only_fields = [
"id",
```

```diff
@@ -71,6 +71,16 @@ class ModuleSerializer(BaseSerializer):
project_id = self.context["project_id"]
workspace_id = self.context["workspace_id"]

module_name = validated_data.get("name")
if module_name:
# Lookup for the module name in the module table for that project
if Module.objects.filter(
name=module_name, project_id=project_id
).exists():
raise serializers.ValidationError(
{"error": "Module with this name already exists"}
)

module = Module.objects.create(**validated_data, project_id=project_id)
if members is not None:
ModuleMember.objects.bulk_create(
@@ -93,6 +103,19 @@ class ModuleSerializer(BaseSerializer):

def update(self, instance, validated_data):
members = validated_data.pop("members", None)
module_name = validated_data.get("name")
if module_name:
# Lookup for the module name in the module table for that project
if (
Module.objects.filter(
name=module_name, project=instance.project
)
.exclude(id=instance.id)
.exists()
):
raise serializers.ValidationError(
{"error": "Module with this name already exists"}
)

if members is not None:
ModuleMember.objects.filter(module=instance).delete()
```
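Note that the update-time uniqueness check above excludes the row being edited via `.exclude(id=instance.id)`; without that, saving a module without renaming it would always fail. A compact sketch of the same guard, with `Module` passed in as the model from the diff and `instance_id` left as `None` on create:

```python
# Sketch of the name-uniqueness guard from ModuleSerializer above.
from rest_framework import serializers


def validate_unique_module_name(model, name, project_id, instance_id=None):
    qs = model.objects.filter(name=name, project_id=project_id)
    if instance_id is not None:
        qs = qs.exclude(id=instance_id)  # ignore the module being updated
    if qs.exists():
        raise serializers.ValidationError(
            {"error": "Module with this name already exists"}
        )
```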
```diff
@@ -19,6 +19,8 @@ class ProjectSerializer(BaseSerializer):
sort_order = serializers.FloatField(read_only=True)
member_role = serializers.IntegerField(read_only=True)
is_deployed = serializers.BooleanField(read_only=True)
cover_image_url = serializers.CharField(read_only=True)
inbox_view = serializers.BooleanField(read_only=True, source="intake_view")

class Meta:
model = Project
@@ -32,6 +34,7 @@ class ProjectSerializer(BaseSerializer):
"created_by",
"updated_by",
"deleted_at",
"cover_image_url",
]

def validate(self, data):
@@ -87,6 +90,8 @@ class ProjectSerializer(BaseSerializer):


class ProjectLiteSerializer(BaseSerializer):
cover_image_url = serializers.CharField(read_only=True)

class Meta:
model = Project
fields = [
@@ -97,5 +102,6 @@ class ProjectLiteSerializer(BaseSerializer):
"icon_prop",
"emoji",
"description",
"cover_image_url",
]
read_only_fields = fields
```

```diff
@@ -13,6 +13,7 @@ class UserLiteSerializer(BaseSerializer):
"last_name",
"email",
"avatar",
"avatar_url",
"display_name",
"email",
]
```

```diff
@@ -3,7 +3,7 @@ from .state import urlpatterns as state_patterns
from .issue import urlpatterns as issue_patterns
from .cycle import urlpatterns as cycle_patterns
from .module import urlpatterns as module_patterns
from .inbox import urlpatterns as inbox_patterns
from .intake import urlpatterns as intake_patterns
from .member import urlpatterns as member_patterns

urlpatterns = [
@@ -12,6 +12,6 @@ urlpatterns = [
*issue_patterns,
*cycle_patterns,
*module_patterns,
*inbox_patterns,
*intake_patterns,
*member_patterns,
]
```

```diff
@@ -1,17 +0,0 @@
from django.urls import path

from plane.api.views import InboxIssueAPIEndpoint


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/",
        InboxIssueAPIEndpoint.as_view(),
        name="inbox-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/<uuid:issue_id>/",
        InboxIssueAPIEndpoint.as_view(),
        name="inbox-issue",
    ),
]
```

apiserver/plane/api/urls/intake.py (new file):

```diff
@@ -0,0 +1,27 @@
from django.urls import path

from plane.api.views import IntakeIssueAPIEndpoint


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/",
        IntakeIssueAPIEndpoint.as_view(),
        name="inbox-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/<uuid:issue_id>/",
        IntakeIssueAPIEndpoint.as_view(),
        name="inbox-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/intake-issues/",
        IntakeIssueAPIEndpoint.as_view(),
        name="intake-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/intake-issues/<uuid:issue_id>/",
        IntakeIssueAPIEndpoint.as_view(),
        name="intake-issue",
    ),
]
```

```diff
@@ -27,5 +27,4 @@ from .module import (

from .member import ProjectMemberAPIEndpoint

from .inbox import InboxIssueAPIEndpoint

from .intake import IntakeIssueAPIEndpoint
```
```diff
@@ -7,6 +7,7 @@ from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db import IntegrityError
from django.urls import resolve
from django.utils import timezone
from plane.db.models.api import APIToken
from rest_framework import status
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
@@ -16,7 +17,7 @@ from rest_framework.views import APIView

# Module imports
from plane.api.middleware.api_authentication import APIKeyAuthentication
from plane.api.rate_limit import ApiKeyRateThrottle
from plane.api.rate_limit import ApiKeyRateThrottle, ServiceTokenRateThrottle
from plane.utils.exception_logger import log_exception
from plane.utils.paginator import BasePaginator

@@ -44,15 +45,29 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
IsAuthenticated,
]

throttle_classes = [
ApiKeyRateThrottle,
]

def filter_queryset(self, queryset):
for backend in list(self.filter_backends):
queryset = backend().filter_queryset(self.request, queryset, self)
return queryset

def get_throttles(self):
throttle_classes = []
api_key = self.request.headers.get("X-Api-Key")

if api_key:
service_token = APIToken.objects.filter(
token=api_key,
is_service=True,
).first()

if service_token:
throttle_classes.append(ServiceTokenRateThrottle())
return throttle_classes

throttle_classes.append(ApiKeyRateThrottle())

return throttle_classes

def handle_exception(self, exc):
"""
Handle any exception that occurs, by returning an appropriate response,
@@ -152,4 +167,4 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
for expand in self.request.GET.get("expand", "").split(",")
if expand
]
return expand if expand else None
return expand if expand else None
```
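DRF instantiates throttles per request via `get_throttles()`, which is what lets the `BaseAPIView` override above pick a different rate class when the `X-Api-Key` belongs to a service token. A self-contained sketch of the selection logic; the `is_service_token` helper and the rates are hypothetical stand-ins for the `APIToken` lookup and configured rates in the diff:

```python
from rest_framework.throttling import SimpleRateThrottle
from rest_framework.views import APIView


class ApiKeyThrottleSketch(SimpleRateThrottle):
    scope = "api_key"
    rate = "60/minute"  # assumed rate, for illustration only

    def get_cache_key(self, request, view):
        api_key = request.headers.get("X-Api-Key")
        return f"{self.scope}:{api_key}" if api_key else None


class ServiceTokenThrottleSketch(ApiKeyThrottleSketch):
    scope = "service_token"
    rate = "300/minute"


def is_service_token(api_key: str) -> bool:
    # Hypothetical stand-in for the diff's
    # APIToken.objects.filter(token=api_key, is_service=True).first() lookup.
    return api_key.startswith("svc_")


class ThrottledViewSketch(APIView):
    def get_throttles(self):
        # Called once per request, so the throttle set can depend on the caller
        api_key = self.request.headers.get("X-Api-Key")
        if api_key and is_service_token(api_key):
            return [ServiceTokenThrottleSketch()]
        return [ApiKeyThrottleSketch()]
```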
```diff
@@ -13,8 +13,12 @@ from django.db.models import (
Q,
Sum,
FloatField,
Case,
When,
Value,
)
from django.db.models.functions import Cast
from django.db.models.functions import Cast, Concat
from django.db import models

# Third party imports
from rest_framework import status
@@ -26,13 +30,13 @@ from plane.api.serializers import (
CycleSerializer,
)
from plane.app.permissions import ProjectEntityPermission
from plane.bgtasks.issue_activites_task import issue_activity
from plane.bgtasks.issue_activities_task import issue_activity
from plane.db.models import (
Cycle,
CycleIssue,
Issue,
Project,
IssueAttachment,
FileAsset,
IssueLink,
ProjectMember,
UserFavorite,
@@ -74,6 +78,7 @@ class CycleAPIEndpoint(BaseAPIView):
filter=Q(
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
issue_cycle__deleted_at__isnull=True,
),
)
)
@@ -84,6 +89,7 @@ class CycleAPIEndpoint(BaseAPIView):
issue_cycle__issue__state__group="completed",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
issue_cycle__deleted_at__isnull=True,
),
)
)
@@ -94,6 +100,7 @@ class CycleAPIEndpoint(BaseAPIView):
issue_cycle__issue__state__group="cancelled",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
issue_cycle__deleted_at__isnull=True,
),
)
)
@@ -104,6 +111,7 @@ class CycleAPIEndpoint(BaseAPIView):
issue_cycle__issue__state__group="started",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
issue_cycle__deleted_at__isnull=True,
),
)
)
@@ -114,6 +122,7 @@ class CycleAPIEndpoint(BaseAPIView):
issue_cycle__issue__state__group="unstarted",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
issue_cycle__deleted_at__isnull=True,
),
)
)
@@ -124,6 +133,7 @@ class CycleAPIEndpoint(BaseAPIView):
issue_cycle__issue__state__group="backlog",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
issue_cycle__deleted_at__isnull=True,
),
)
)
@@ -207,8 +217,7 @@ class CycleAPIEndpoint(BaseAPIView):
# Incomplete Cycles
if cycle_view == "incomplete":
queryset = queryset.filter(
Q(end_date__gte=timezone.now().date())
| Q(end_date__isnull=True),
Q(end_date__gte=timezone.now()) | Q(end_date__isnull=True),
)
return self.paginate(
request=request,
@@ -309,10 +318,7 @@ class CycleAPIEndpoint(BaseAPIView):

request_data = request.data

if (
cycle.end_date is not None
and cycle.end_date < timezone.now().date()
):
if cycle.end_date is not None and cycle.end_date < timezone.now():
if "sort_order" in request_data:
# Can only change sort order
request_data = {
@@ -405,10 +411,6 @@ class CycleAPIEndpoint(BaseAPIView):
)
# Delete the cycle
cycle.delete()
# Delete the cycle issues
CycleIssue.objects.filter(
cycle_id=self.kwargs.get("pk"),
).delete()
# Delete the user favorite cycle
UserFavorite.objects.filter(
entity_type="cycle",
```
```diff
@@ -441,6 +443,7 @@ class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):
filter=Q(
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
issue_cycle__deleted_at__isnull=True,
),
)
)
@@ -451,6 +454,7 @@ class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):
issue_cycle__issue__state__group="completed",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
issue_cycle__deleted_at__isnull=True,
),
)
)
@@ -461,6 +465,7 @@ class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):
issue_cycle__issue__state__group="cancelled",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
issue_cycle__deleted_at__isnull=True,
),
)
)
@@ -471,6 +476,7 @@ class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):
issue_cycle__issue__state__group="started",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
issue_cycle__deleted_at__isnull=True,
),
)
)
@@ -481,6 +487,7 @@ class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):
issue_cycle__issue__state__group="unstarted",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
issue_cycle__deleted_at__isnull=True,
),
)
)
@@ -491,6 +498,7 @@ class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):
issue_cycle__issue__state__group="backlog",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
issue_cycle__deleted_at__isnull=True,
),
)
)
@@ -504,6 +512,7 @@ class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):
issue_cycle__issue__state__group="completed",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
issue_cycle__deleted_at__isnull=True,
),
)
)
@@ -514,6 +523,7 @@ class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):
issue_cycle__issue__state__group="started",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
issue_cycle__deleted_at__isnull=True,
),
)
)
@@ -537,13 +547,19 @@ class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):
cycle = Cycle.objects.get(
pk=cycle_id, project_id=project_id, workspace__slug=slug
)
if cycle.end_date >= timezone.now().date():
if cycle.end_date >= timezone.now():
return Response(
{"error": "Only completed cycles can be archived"},
status=status.HTTP_400_BAD_REQUEST,
)
cycle.archived_at = timezone.now()
cycle.save()
UserFavorite.objects.filter(
entity_type="cycle",
entity_identifier=cycle_id,
project_id=project_id,
workspace__slug=slug,
).delete()
return Response(status=status.HTTP_204_NO_CONTENT)

def delete(self, request, slug, project_id, cycle_id):
```
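The repeated `timezone.now().date()` to `timezone.now()` swaps in the hunks above only type-check if `Cycle.end_date` now holds datetimes rather than dates: Python refuses to order a `date` against a `datetime`, so the comparison has to match the field type. A two-line illustration, under the assumption that the field became datetime-valued:

```python
import datetime

d = datetime.date(2024, 1, 1)
dt = datetime.datetime(2024, 1, 1, 12, 0)
try:
    _ = d < dt
except TypeError:
    # Ordering a date against a datetime raises TypeError, hence the
    # consistent datetime-vs-datetime comparisons in the hunks above.
    pass
```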
```diff
@@ -613,7 +629,10 @@ class CycleIssueAPIEndpoint(BaseAPIView):
# List
order_by = request.GET.get("order_by", "created_at")
issues = (
Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
Issue.issue_objects.filter(
issue_cycle__cycle_id=cycle_id,
issue_cycle__deleted_at__isnull=True,
)
.annotate(
sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
@@ -639,8 +658,9 @@ class CycleIssueAPIEndpoint(BaseAPIView):
.values("count")
)
.annotate(
attachment_count=IssueAttachment.objects.filter(
issue=OuterRef("id")
attachment_count=FileAsset.objects.filter(
issue_id=OuterRef("id"),
entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
@@ -809,6 +829,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
filter=Q(
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
issue_cycle__deleted_at__isnull=True,
),
)
)
@@ -819,6 +840,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
issue_cycle__issue__state__group="completed",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
issue_cycle__deleted_at__isnull=True,
),
)
)
@@ -829,6 +851,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
issue_cycle__issue__state__group="cancelled",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
issue_cycle__deleted_at__isnull=True,
),
)
)
@@ -839,6 +862,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
issue_cycle__issue__state__group="started",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
issue_cycle__deleted_at__isnull=True,
),
)
)
@@ -849,6 +873,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
issue_cycle__issue__state__group="unstarted",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
issue_cycle__deleted_at__isnull=True,
),
)
)
@@ -859,6 +884,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
issue_cycle__issue__state__group="backlog",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
issue_cycle__deleted_at__isnull=True,
),
)
)
@@ -875,13 +901,34 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
assignee_estimate_data = (
Issue.issue_objects.filter(
issue_cycle__cycle_id=cycle_id,
issue_cycle__deleted_at__isnull=True,
workspace__slug=slug,
project_id=project_id,
)
.annotate(display_name=F("assignees__display_name"))
.annotate(assignee_id=F("assignees__id"))
.annotate(avatar=F("assignees__avatar"))
.values("display_name", "assignee_id", "avatar")
.annotate(
avatar_url=Case(
# If `avatar_asset` exists, use it to generate the asset URL
When(
assignees__avatar_asset__isnull=False,
then=Concat(
Value("/api/assets/v2/static/"),
"assignees__avatar_asset",  # Assuming avatar_asset has an id or relevant field
Value("/"),
),
),
# If `avatar_asset` is None, fall back to using `avatar` field directly
When(
assignees__avatar_asset__isnull=True,
then="assignees__avatar",
),
default=Value(None),
output_field=models.CharField(),
)
)
.values("display_name", "assignee_id", "avatar", "avatar_url")
.annotate(
total_estimates=Sum(
Cast("estimate_point__value", FloatField())
@@ -918,7 +965,8 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
if item["assignee_id"]
else None
),
"avatar": item["avatar"],
"avatar": item.get("avatar", None),
"avatar_url": item.get("avatar_url", None),
"total_estimates": item["total_estimates"],
"completed_estimates": item["completed_estimates"],
"pending_estimates": item["pending_estimates"],
```
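The `avatar_url` annotation above computes the URL in SQL rather than per row in Python: a `Case` picks the static-asset route when `avatar_asset` is set and falls back to the legacy `avatar` column otherwise. The expression in isolation, as a hedged sketch; the field names and the `/api/assets/v2/static/` route are taken from the diff:

```python
# The conditional avatar_url expression used in the annotations above.
# Inside When()/Concat(), a bare string such as "assignees__avatar"
# is resolved by Django as a field reference.
from django.db import models
from django.db.models import Case, Value, When
from django.db.models.functions import Concat

avatar_url_expr = Case(
    When(
        assignees__avatar_asset__isnull=False,
        then=Concat(
            Value("/api/assets/v2/static/"),
            "assignees__avatar_asset",
            Value("/"),
        ),
    ),
    When(assignees__avatar_asset__isnull=True, then="assignees__avatar"),
    default=Value(None),
    output_field=models.CharField(),
)
# Usage sketch: Issue.issue_objects.annotate(avatar_url=avatar_url_expr)
```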
```diff
@@ -929,6 +977,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
label_distribution_data = (
Issue.issue_objects.filter(
issue_cycle__cycle_id=cycle_id,
issue_cycle__deleted_at__isnull=True,
workspace__slug=slug,
project_id=project_id,
)
@@ -990,13 +1039,34 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
assignee_distribution = (
Issue.issue_objects.filter(
issue_cycle__cycle_id=cycle_id,
issue_cycle__deleted_at__isnull=True,
workspace__slug=slug,
project_id=project_id,
)
.annotate(display_name=F("assignees__display_name"))
.annotate(assignee_id=F("assignees__id"))
.annotate(avatar=F("assignees__avatar"))
.values("display_name", "assignee_id", "avatar")
.annotate(
avatar_url=Case(
# If `avatar_asset` exists, use it to generate the asset URL
When(
assignees__avatar_asset__isnull=False,
then=Concat(
Value("/api/assets/v2/static/"),
"assignees__avatar_asset",  # Assuming avatar_asset has an id or relevant field
Value("/"),
),
),
# If `avatar_asset` is None, fall back to using `avatar` field directly
When(
assignees__avatar_asset__isnull=True,
then="assignees__avatar",
),
default=Value(None),
output_field=models.CharField(),
)
)
.values("display_name", "assignee_id", "avatar_url")
.annotate(
total_issues=Count(
"id",
@@ -1035,7 +1105,8 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
"assignee_id": (
str(item["assignee_id"]) if item["assignee_id"] else None
),
"avatar": item["avatar"],
"avatar": item.get("avatar", None),
"avatar_url": item.get("avatar_url", None),
"total_issues": item["total_issues"],
"completed_issues": item["completed_issues"],
"pending_issues": item["pending_issues"],
@@ -1047,6 +1118,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
label_distribution = (
Issue.issue_objects.filter(
issue_cycle__cycle_id=cycle_id,
issue_cycle__deleted_at__isnull=True,
workspace__slug=slug,
project_id=project_id,
)
@@ -1140,7 +1212,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):

if (
new_cycle.end_date is not None
and new_cycle.end_date < timezone.now().date()
and new_cycle.end_date < timezone.now()
):
return Response(
{
```
```diff
@@ -1,7 +1,7 @@
# Python imports
import json

# Django improts
# Django imports
from django.core.serializers.json import DjangoJSONEncoder
from django.utils import timezone
from django.db.models import Q, Value, UUIDField
@@ -14,12 +14,12 @@ from rest_framework import status
from rest_framework.response import Response

# Module imports
from plane.api.serializers import InboxIssueSerializer, IssueSerializer
from plane.api.serializers import IntakeIssueSerializer, IssueSerializer
from plane.app.permissions import ProjectLitePermission
from plane.bgtasks.issue_activites_task import issue_activity
from plane.bgtasks.issue_activities_task import issue_activity
from plane.db.models import (
Inbox,
InboxIssue,
Intake,
IntakeIssue,
Issue,
Project,
ProjectMember,
@@ -29,10 +29,10 @@ from plane.db.models import (
from .base import BaseAPIView


class InboxIssueAPIEndpoint(BaseAPIView):
class IntakeIssueAPIEndpoint(BaseAPIView):
"""
This viewset automatically provides `list`, `create`, `retrieve`,
`update` and `destroy` actions related to inbox issues.
`update` and `destroy` actions related to intake issues.

"""

@@ -40,15 +40,15 @@ class InboxIssueAPIEndpoint(BaseAPIView):
ProjectLitePermission,
]

serializer_class = InboxIssueSerializer
model = InboxIssue
serializer_class = IntakeIssueSerializer
model = IntakeIssue

filterset_fields = [
"status",
]

def get_queryset(self):
inbox = Inbox.objects.filter(
intake = Intake.objects.filter(
workspace__slug=self.kwargs.get("slug"),
project_id=self.kwargs.get("project_id"),
).first()
@@ -58,16 +58,16 @@ class InboxIssueAPIEndpoint(BaseAPIView):
pk=self.kwargs.get("project_id"),
)

if inbox is None and not project.inbox_view:
return InboxIssue.objects.none()
if intake is None and not project.intake_view:
return IntakeIssue.objects.none()

return (
InboxIssue.objects.filter(
IntakeIssue.objects.filter(
Q(snoozed_till__gte=timezone.now())
| Q(snoozed_till__isnull=True),
workspace__slug=self.kwargs.get("slug"),
project_id=self.kwargs.get("project_id"),
inbox_id=inbox.id,
intake_id=intake.id,
)
.select_related("issue", "workspace", "project")
.order_by(self.kwargs.get("order_by", "-created_at"))
```
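The queryset above encodes snoozing directly in the filter: an intake issue is listed when it was never snoozed or when its `snoozed_till` timestamp is still in the future. Isolated, the filter is just an OR of two `Q` objects; `IntakeIssue` and the field names come from the diff, and `slug`, `project_id`, and `intake` stand in for the view's kwargs:

```python
# The snooze filter from IntakeIssueAPIEndpoint.get_queryset above, isolated.
from django.db.models import Q
from django.utils import timezone

from plane.db.models import IntakeIssue  # import as in the diff

snooze_filter = Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True)
visible_intake_issues = IntakeIssue.objects.filter(
    snooze_filter,
    workspace__slug=slug,
    project_id=project_id,
    intake_id=intake.id,
)
```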
@@ -75,22 +75,22 @@ class InboxIssueAPIEndpoint(BaseAPIView):
|
||||
|
||||
def get(self, request, slug, project_id, issue_id=None):
|
||||
if issue_id:
|
||||
inbox_issue_queryset = self.get_queryset().get(issue_id=issue_id)
|
||||
inbox_issue_data = InboxIssueSerializer(
|
||||
inbox_issue_queryset,
|
||||
intake_issue_queryset = self.get_queryset().get(issue_id=issue_id)
|
||||
intake_issue_data = IntakeIssueSerializer(
|
||||
intake_issue_queryset,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
).data
|
||||
return Response(
|
||||
inbox_issue_data,
|
||||
intake_issue_data,
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
issue_queryset = self.get_queryset()
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=(issue_queryset),
|
||||
on_results=lambda inbox_issues: InboxIssueSerializer(
|
||||
inbox_issues,
|
||||
on_results=lambda intake_issues: IntakeIssueSerializer(
|
||||
intake_issues,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
@@ -104,7 +104,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
inbox = Inbox.objects.filter(
|
||||
intake = Intake.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id
|
||||
).first()
|
||||
|
||||
@@ -113,11 +113,11 @@ class InboxIssueAPIEndpoint(BaseAPIView):
|
||||
pk=project_id,
|
||||
)
|
||||
|
||||
# Inbox view
|
||||
if inbox is None and not project.inbox_view:
|
||||
# Intake view
|
||||
if intake is None and not project.intake_view:
|
||||
return Response(
|
||||
{
|
||||
"error": "Inbox is not enabled for this project enable it through the project's api"
|
||||
"error": "Intake is not enabled for this project enable it through the project's api"
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
@@ -139,7 +139,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):
|
||||
state, _ = State.objects.get_or_create(
|
||||
name="Triage",
|
||||
group="triage",
|
||||
description="Default state for managing all Inbox Issues",
|
||||
description="Default state for managing all Intake Issues",
|
||||
project_id=project_id,
|
||||
color="#ff7700",
|
||||
is_triage=True,
|
||||
@@ -157,12 +157,12 @@ class InboxIssueAPIEndpoint(BaseAPIView):
|
||||
state=state,
|
||||
)
|
||||
|
||||
# create an inbox issue
|
||||
inbox_issue = InboxIssue.objects.create(
|
||||
inbox_id=inbox.id,
|
||||
# create an intake issue
|
||||
intake_issue = IntakeIssue.objects.create(
|
||||
intake_id=intake.id,
|
||||
project_id=project_id,
|
||||
issue=issue,
|
||||
source=request.data.get("source", "in-app"),
|
||||
source=request.data.get("source", "IN-APP"),
|
||||
)
|
||||
# Create an Issue Activity
|
||||
issue_activity.delay(
|
||||
@@ -173,14 +173,14 @@ class InboxIssueAPIEndpoint(BaseAPIView):
|
||||
project_id=str(project_id),
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
inbox=str(inbox_issue.id),
|
||||
intake=str(intake_issue.id),
|
||||
)
|
||||
|
||||
serializer = InboxIssueSerializer(inbox_issue)
|
||||
serializer = IntakeIssueSerializer(intake_issue)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
def patch(self, request, slug, project_id, issue_id):
|
||||
inbox = Inbox.objects.filter(
|
||||
intake = Intake.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id
|
||||
).first()
|
||||
|
||||
@@ -189,21 +189,21 @@ class InboxIssueAPIEndpoint(BaseAPIView):
|
||||
pk=project_id,
|
||||
)
|
||||
|
||||
# Inbox view
|
||||
if inbox is None and not project.inbox_view:
|
||||
# Intake view
|
||||
if intake is None and not project.intake_view:
|
||||
return Response(
|
||||
{
|
||||
"error": "Inbox is not enabled for this project enable it through the project's api"
|
||||
"error": "Intake is not enabled for this project enable it through the project's api"
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Get the inbox issue
|
||||
inbox_issue = InboxIssue.objects.get(
|
||||
# Get the intake issue
|
||||
intake_issue = IntakeIssue.objects.get(
|
||||
issue_id=issue_id,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
inbox_id=inbox.id,
|
||||
intake_id=intake.id,
|
||||
)
|
||||
|
||||
# Get the project member
|
||||
@@ -215,11 +215,11 @@ class InboxIssueAPIEndpoint(BaseAPIView):
|
||||
)
|
||||
|
||||
# Only project members admins and created_by users can access this endpoint
|
||||
if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(
|
||||
if project_member.role <= 5 and str(intake_issue.created_by_id) != str(
|
||||
request.user.id
|
||||
):
|
||||
return Response(
|
||||
{"error": "You cannot edit inbox issues"},
|
||||
{"error": "You cannot edit intake issues"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
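Note on the threshold change above: the guard drops from `role <= 10` to `role <= 5` because the standalone 10 tier is removed in this release; under the new scheme (see the `ROLE` enum added in `plane/app/permissions/base.py` further down) Admin = 20, Member = 15, Guest = 5. A minimal sketch of the resulting rule; the helper name is ours, not Plane's:

GUEST = 5

def can_edit_intake_issue(role: int, created_by_id: str, user_id: str) -> bool:
    # Mirrors the view logic above: guests may edit an intake issue only
    # if they created it; members (15) and admins (20) always may.
    return role > GUEST or created_by_id == user_id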
@@ -232,7 +232,10 @@ class InboxIssueAPIEndpoint(BaseAPIView):
                     ArrayAgg(
                         "labels__id",
                         distinct=True,
-                        filter=~Q(labels__id__isnull=True),
+                        filter=Q(
+                            ~Q(labels__id__isnull=True)
+                            & Q(label_issue__deleted_at__isnull=True),
+                        ),
                     ),
                     Value([], output_field=ArrayField(UUIDField())),
                 ),
@@ -240,7 +243,11 @@ class InboxIssueAPIEndpoint(BaseAPIView):
                     ArrayAgg(
                         "assignees__id",
                         distinct=True,
-                        filter=~Q(assignees__id__isnull=True),
+                        filter=Q(
+                            ~Q(assignees__id__isnull=True)
+                            & Q(assignees__member_project__is_active=True)
+                            & Q(issue_assignee__deleted_at__isnull=True)
+                        ),
                     ),
                     Value([], output_field=ArrayField(UUIDField())),
                 ),
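The reworked `filter=` clauses keep soft-deleted through-table rows (and deactivated project members) out of the aggregated ID arrays. A self-contained sketch of the pattern, assuming an `Issue` model whose label m2m goes through a `label_issue` relation carrying a `deleted_at` column:

from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models import Q, UUIDField, Value
from django.db.models.functions import Coalesce

issues = Issue.objects.annotate(
    label_ids=Coalesce(
        ArrayAgg(
            "labels__id",
            distinct=True,
            # Skip NULLs and rows whose through record was soft-deleted.
            filter=Q(
                ~Q(labels__id__isnull=True)
                & Q(label_issue__deleted_at__isnull=True)
            ),
        ),
        # Coalesce turns "no rows aggregated" into [] instead of NULL.
        Value([], output_field=ArrayField(UUIDField())),
    )
)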
@@ -249,9 +256,8 @@ class InboxIssueAPIEndpoint(BaseAPIView):
             workspace__slug=slug,
             project_id=project_id,
         )
-        # Only allow guests and viewers to edit name and description
-        if project_member.role <= 10:
-            # viewers and guests since only viewers and guests
+        # Only allow guests to edit name and description
+        if project_member.role <= 5:
             issue_data = {
                 "name": issue_data.get("name", issue.name),
                 "description_html": issue_data.get(
@@ -282,7 +288,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):
                     cls=DjangoJSONEncoder,
                 ),
                 epoch=int(timezone.now().timestamp()),
-                inbox=(inbox_issue.id),
+                intake=(intake_issue.id),
             )
             issue_serializer.save()
         else:
@@ -290,13 +296,13 @@ class InboxIssueAPIEndpoint(BaseAPIView):
                 issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST
             )

-        # Only project admins and members can edit inbox issue attributes
-        if project_member.role > 10:
-            serializer = InboxIssueSerializer(
-                inbox_issue, data=request.data, partial=True
+        # Only project admins and members can edit intake issue attributes
+        if project_member.role > 15:
+            serializer = IntakeIssueSerializer(
+                intake_issue, data=request.data, partial=True
             )
             current_instance = json.dumps(
-                InboxIssueSerializer(inbox_issue).data, cls=DjangoJSONEncoder
+                IntakeIssueSerializer(intake_issue).data, cls=DjangoJSONEncoder
             )

             if serializer.is_valid():
@@ -339,7 +345,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):

                 # create a activity for status change
                 issue_activity.delay(
-                    type="inbox.activity.created",
+                    type="intake.activity.created",
                     requested_data=json.dumps(
                         request.data, cls=DjangoJSONEncoder
                     ),
@@ -350,7 +356,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):
                     epoch=int(timezone.now().timestamp()),
                     notification=False,
                     origin=request.META.get("HTTP_ORIGIN"),
-                    inbox=str(inbox_issue.id),
+                    intake=str(intake_issue.id),
                 )

                 return Response(serializer.data, status=status.HTTP_200_OK)
@@ -359,12 +365,12 @@ class InboxIssueAPIEndpoint(BaseAPIView):
             )
         else:
             return Response(
-                InboxIssueSerializer(inbox_issue).data,
+                IntakeIssueSerializer(intake_issue).data,
                 status=status.HTTP_200_OK,
             )

     def delete(self, request, slug, project_id, issue_id):
-        inbox = Inbox.objects.filter(
+        intake = Intake.objects.filter(
             workspace__slug=slug, project_id=project_id
         ).first()

@@ -373,25 +379,25 @@ class InboxIssueAPIEndpoint(BaseAPIView):
             pk=project_id,
         )

-        # Inbox view
-        if inbox is None and not project.inbox_view:
+        # Intake view
+        if intake is None and not project.intake_view:
             return Response(
                 {
-                    "error": "Inbox is not enabled for this project enable it through the project's api"
+                    "error": "Intake is not enabled for this project enable it through the project's api"
                 },
                 status=status.HTTP_400_BAD_REQUEST,
             )

-        # Get the inbox issue
-        inbox_issue = InboxIssue.objects.get(
+        # Get the intake issue
+        intake_issue = IntakeIssue.objects.get(
             issue_id=issue_id,
             workspace__slug=slug,
             project_id=project_id,
-            inbox_id=inbox.id,
+            intake_id=intake.id,
         )

         # Check the issue status
-        if inbox_issue.status in [-2, -1, 0, 2]:
+        if intake_issue.status in [-2, -1, 0, 2]:
             # Delete the issue also
             issue = Issue.objects.filter(
                 workspace__slug=slug, project_id=project_id, pk=issue_id
@@ -411,5 +417,5 @@ class InboxIssueAPIEndpoint(BaseAPIView):
             )
             issue.delete()

-        inbox_issue.delete()
+        intake_issue.delete()
         return Response(status=status.HTTP_204_NO_CONTENT)
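For context on the `status in [-2, -1, 0, 2]` guard: intake issues carry small integer statuses. The mapping below is our reading of the Plane codebase, not something stated in this diff, so treat it as an assumption:

# Assumed intake issue status codes (not part of this diff):
INTAKE_STATUS = {
    -2: "pending",
    -1: "rejected",
    0: "snoozed",
    1: "accepted",
    2: "duplicate",
}
# The branch above deletes the underlying Issue for every status except
# 1 ("accepted"), where the issue has already graduated into the project
# and must outlive the intake record.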
@@ -16,6 +16,7 @@ from django.db.models import (
     Q,
     Value,
     When,
+    Subquery,
 )
 from django.utils import timezone

@@ -38,16 +39,17 @@ from plane.app.permissions import (
     ProjectLitePermission,
     ProjectMemberPermission,
 )
-from plane.bgtasks.issue_activites_task import issue_activity
+from plane.bgtasks.issue_activities_task import issue_activity
 from plane.db.models import (
     Issue,
     IssueActivity,
-    IssueAttachment,
+    FileAsset,
     IssueComment,
     IssueLink,
     Label,
     Project,
     ProjectMember,
+    CycleIssue,
 )

 from .base import BaseAPIView
@@ -202,7 +204,13 @@ class IssueAPIEndpoint(BaseAPIView):

         issue_queryset = (
             self.get_queryset()
-            .annotate(cycle_id=F("issue_cycle__cycle_id"))
+            .annotate(
+                cycle_id=Subquery(
+                    CycleIssue.objects.filter(
+                        issue=OuterRef("id"), deleted_at__isnull=True
+                    ).values("cycle_id")[:1]
+                )
+            )
             .annotate(
                 link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                 .order_by()
@@ -210,8 +218,9 @@ class IssueAPIEndpoint(BaseAPIView):
                 .values("count")
             )
             .annotate(
-                attachment_count=IssueAttachment.objects.filter(
-                    issue=OuterRef("id")
+                attachment_count=FileAsset.objects.filter(
+                    issue_id=OuterRef("id"),
+                    entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
                 )
                 .order_by()
                 .annotate(count=Func(F("id"), function="Count"))
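Swapping the joined `F("issue_cycle__cycle_id")` for a correlated `Subquery` lets the annotation ignore soft-deleted `CycleIssue` rows and guarantees at most one value per issue. A generic sketch of the pattern:

from django.db.models import OuterRef, Subquery

# For each Issue, pick the cycle_id of one live CycleIssue row; issues
# whose only links are soft-deleted annotate as None instead of leaking
# a stale cycle_id through the join.
live_link = CycleIssue.objects.filter(
    issue=OuterRef("id"), deleted_at__isnull=True
).values("cycle_id")[:1]

issues = Issue.objects.annotate(cycle_id=Subquery(live_link))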
@@ -355,6 +364,124 @@ class IssueAPIEndpoint(BaseAPIView):
             return Response(serializer.data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

+    def put(self, request, slug, project_id):
+        # Get the entities required for putting the issue, external_id and
+        # external_source are must to identify the issue here
+        project = Project.objects.get(pk=project_id)
+        external_id = request.data.get("external_id")
+        external_source = request.data.get("external_source")
+
+        # If the external_id and source are present, we need to find the exact
+        # issue that needs to be updated with the provided external_id and
+        # external_source
+        if external_id and external_source:
+            try:
+                issue = Issue.objects.get(
+                    project_id=project_id,
+                    workspace__slug=slug,
+                    external_id=external_id,
+                    external_source=external_source,
+                )
+
+                # Get the current instance of the issue in order to track
+                # changes and dispatch the issue activity
+                current_instance = json.dumps(
+                    IssueSerializer(issue).data, cls=DjangoJSONEncoder
+                )
+
+                # Get the requested data, encode it as django object and pass it
+                # to serializer to validation
+                requested_data = json.dumps(
+                    self.request.data, cls=DjangoJSONEncoder
+                )
+                serializer = IssueSerializer(
+                    issue,
+                    data=request.data,
+                    context={
+                        "project_id": project_id,
+                        "workspace_id": project.workspace_id,
+                    },
+                    partial=True,
+                )
+                if serializer.is_valid():
+                    # If the serializer is valid, save the issue and dispatch
+                    # the update issue activity worker event.
+                    serializer.save()
+                    issue_activity.delay(
+                        type="issue.activity.updated",
+                        requested_data=requested_data,
+                        actor_id=str(request.user.id),
+                        issue_id=str(issue.id),
+                        project_id=str(project_id),
+                        current_instance=current_instance,
+                        epoch=int(timezone.now().timestamp()),
+                    )
+                    return Response(serializer.data, status=status.HTTP_200_OK)
+                return Response(
+                    # If the serializer is not valid, respond with 400 bad
+                    # request
+                    serializer.errors,
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+            except Issue.DoesNotExist:
+                # If the issue does not exist, a new record needs to be created
+                # for the requested data.
+                # Serialize the data with the context of the project and
+                # workspace
+                serializer = IssueSerializer(
+                    data=request.data,
+                    context={
+                        "project_id": project_id,
+                        "workspace_id": project.workspace_id,
+                        "default_assignee_id": project.default_assignee_id,
+                    },
+                )
+
+                # If the serializer is valid, save the issue and dispatch the
+                # issue activity worker event as created
+                if serializer.is_valid():
+                    serializer.save()
+                    # Refetch the issue
+                    issue = Issue.objects.filter(
+                        workspace__slug=slug,
+                        project_id=project_id,
+                        pk=serializer.data["id"],
+                    ).first()
+
+                    # If any of the created_at or created_by is present, update
+                    # the issue with the provided data, else return with the
+                    # default states given.
+                    issue.created_at = request.data.get(
+                        "created_at", timezone.now()
+                    )
+                    issue.created_by_id = request.data.get(
+                        "created_by", request.user.id
+                    )
+                    issue.save(update_fields=["created_at", "created_by"])
+
+                    issue_activity.delay(
+                        type="issue.activity.created",
+                        requested_data=json.dumps(
+                            self.request.data, cls=DjangoJSONEncoder
+                        ),
+                        actor_id=str(request.user.id),
+                        issue_id=str(serializer.data.get("id", None)),
+                        project_id=str(project_id),
+                        current_instance=None,
+                        epoch=int(timezone.now().timestamp()),
+                    )
+                    return Response(
+                        serializer.data, status=status.HTTP_201_CREATED
+                    )
+                return Response(
+                    serializer.errors, status=status.HTTP_400_BAD_REQUEST
+                )
+        else:
+            return Response(
+                {"error": "external_id and external_source are required"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )

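The new `put` gives the external API upsert semantics keyed on the `(external_source, external_id)` pair. A hypothetical client call; the URL shape and header name are assumed from Plane's public API conventions, not taken from this diff:

import requests

payload = {
    "name": "Payment webhook retries failing",
    "external_source": "github",
    "external_id": "1482",
}
resp = requests.put(
    "https://api.plane.example/api/v1/workspaces/acme/projects/<project_id>/issues/",
    headers={"X-API-Key": "<api-token>"},
    json=payload,
)
# First call creates the issue (201); repeating the same call matches on
# external_source/external_id and updates it in place (200).
print(resp.status_code)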
     def patch(self, request, slug, project_id, pk=None):
         issue = Issue.objects.get(
             workspace__slug=slug, project_id=project_id, pk=pk
@@ -944,7 +1071,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
     permission_classes = [
         ProjectEntityPermission,
     ]
-    model = IssueAttachment
+    model = FileAsset
     parser_classes = (MultiPartParser, FormParser)

     def post(self, request, slug, project_id, issue_id):
@@ -952,7 +1079,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
         if (
             request.data.get("external_id")
             and request.data.get("external_source")
-            and IssueAttachment.objects.filter(
+            and FileAsset.objects.filter(
                 project_id=project_id,
                 workspace__slug=slug,
                 issue_id=issue_id,
@@ -960,7 +1087,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
                 external_id=request.data.get("external_id"),
             ).exists()
         ):
-            issue_attachment = IssueAttachment.objects.filter(
+            issue_attachment = FileAsset.objects.filter(
                 workspace__slug=slug,
                 project_id=project_id,
                 external_id=request.data.get("external_id"),
@@ -994,7 +1121,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

     def delete(self, request, slug, project_id, issue_id, pk):
-        issue_attachment = IssueAttachment.objects.get(pk=pk)
+        issue_attachment = FileAsset.objects.get(pk=pk)
         issue_attachment.asset.delete(save=False)
         issue_attachment.delete()
         issue_activity.delay(
@@ -1012,7 +1139,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
         return Response(status=status.HTTP_204_NO_CONTENT)

     def get(self, request, slug, project_id, issue_id):
-        issue_attachments = IssueAttachment.objects.filter(
+        issue_attachments = FileAsset.objects.filter(
             issue_id=issue_id, workspace__slug=slug, project_id=project_id
         )
         serializer = IssueAttachmentSerializer(issue_attachments, many=True)

@@ -133,7 +133,7 @@ class ProjectMemberAPIEndpoint(BaseAPIView):
         workspace_member = WorkspaceMember.objects.create(
             workspace=workspace,
             member=user,
-            role=request.data.get("role", 10),
+            role=request.data.get("role", 5),
         )
         workspace_member.save()

@@ -142,7 +142,7 @@ class ProjectMemberAPIEndpoint(BaseAPIView):
         project_member = ProjectMember.objects.create(
             project=project,
             member=user,
-            role=request.data.get("role", 10),
+            role=request.data.get("role", 5),
         )
         project_member.save()

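Attachments now live in the generic `FileAsset` table, discriminated by `entity_type`, rather than in a dedicated `IssueAttachment` model. A minimal sketch of querying them, using the names that appear in this diff:

# Every lookup must pin entity_type, since the same table also stores
# other asset kinds; omitting it would leak unrelated assets.
attachments = FileAsset.objects.filter(
    issue_id=issue.id,
    entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
)
for attachment in attachments:
    print(attachment.id, attachment.asset.name)  # asset is the file field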
@@ -18,10 +18,10 @@ from plane.api.serializers import (
     ModuleSerializer,
 )
 from plane.app.permissions import ProjectEntityPermission
-from plane.bgtasks.issue_activites_task import issue_activity
+from plane.bgtasks.issue_activities_task import issue_activity
 from plane.db.models import (
     Issue,
-    IssueAttachment,
+    FileAsset,
     IssueLink,
     Module,
     ModuleIssue,
@@ -71,6 +71,7 @@ class ModuleAPIEndpoint(BaseAPIView):
                     filter=Q(
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
+                        issue_module__deleted_at__isnull=True,
                     ),
                     distinct=True,
                 ),
@@ -82,6 +83,7 @@ class ModuleAPIEndpoint(BaseAPIView):
                         issue_module__issue__state__group="completed",
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
+                        issue_module__deleted_at__isnull=True,
                     ),
                     distinct=True,
                 )
@@ -93,6 +95,7 @@ class ModuleAPIEndpoint(BaseAPIView):
                         issue_module__issue__state__group="cancelled",
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
+                        issue_module__deleted_at__isnull=True,
                     ),
                     distinct=True,
                 )
@@ -104,6 +107,7 @@ class ModuleAPIEndpoint(BaseAPIView):
                         issue_module__issue__state__group="started",
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
+                        issue_module__deleted_at__isnull=True,
                     ),
                     distinct=True,
                 )
@@ -115,6 +119,7 @@ class ModuleAPIEndpoint(BaseAPIView):
                         issue_module__issue__state__group="unstarted",
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
+                        issue_module__deleted_at__isnull=True,
                     ),
                     distinct=True,
                 )
@@ -126,6 +131,7 @@ class ModuleAPIEndpoint(BaseAPIView):
                         issue_module__issue__state__group="backlog",
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
+                        issue_module__deleted_at__isnull=True,
                     ),
                     distinct=True,
                 )
@@ -298,7 +304,11 @@ class ModuleAPIEndpoint(BaseAPIView):
             actor_id=str(request.user.id),
             issue_id=None,
             project_id=str(project_id),
-            current_instance=None,
+            current_instance=json.dumps(
+                {
+                    "module_name": str(module.name),
+                }
+            ),
             epoch=int(timezone.now().timestamp()),
         )
         module.delete()
@@ -363,7 +373,10 @@ class ModuleIssueAPIEndpoint(BaseAPIView):
     def get(self, request, slug, project_id, module_id):
         order_by = request.GET.get("order_by", "created_at")
         issues = (
-            Issue.issue_objects.filter(issue_module__module_id=module_id)
+            Issue.issue_objects.filter(
+                issue_module__module_id=module_id,
+                issue_module__deleted_at__isnull=True,
+            )
             .annotate(
                 sub_issues_count=Issue.issue_objects.filter(
                     parent=OuterRef("id")
@@ -389,8 +402,9 @@ class ModuleIssueAPIEndpoint(BaseAPIView):
                 .values("count")
             )
             .annotate(
-                attachment_count=IssueAttachment.objects.filter(
-                    issue=OuterRef("id")
+                attachment_count=FileAsset.objects.filter(
+                    issue_id=OuterRef("id"),
+                    entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
                 )
                 .order_by()
                 .annotate(count=Func(F("id"), function="Count"))
@@ -520,7 +534,6 @@ class ModuleIssueAPIEndpoint(BaseAPIView):


 class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView):
-
     permission_classes = [
         ProjectEntityPermission,
     ]
@@ -548,6 +561,7 @@ class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView):
                     filter=Q(
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
+                        issue_module__deleted_at__isnull=True,
                     ),
                     distinct=True,
                 ),
@@ -559,6 +573,7 @@ class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView):
                         issue_module__issue__state__group="completed",
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
+                        issue_module__deleted_at__isnull=True,
                     ),
                     distinct=True,
                 )
@@ -570,6 +585,7 @@ class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView):
                         issue_module__issue__state__group="cancelled",
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
+                        issue_module__deleted_at__isnull=True,
                     ),
                     distinct=True,
                 )
@@ -581,6 +597,7 @@ class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView):
                         issue_module__issue__state__group="started",
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
+                        issue_module__deleted_at__isnull=True,
                     ),
                     distinct=True,
                 )
@@ -592,6 +609,7 @@ class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView):
                         issue_module__issue__state__group="unstarted",
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
+                        issue_module__deleted_at__isnull=True,
                     ),
                     distinct=True,
                 )
@@ -603,6 +621,7 @@ class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView):
                         issue_module__issue__state__group="backlog",
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
+                        issue_module__deleted_at__isnull=True,
                     ),
                     distinct=True,
                 )
@@ -635,6 +654,12 @@ class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView):
         )
         module.archived_at = timezone.now()
         module.save()
+        UserFavorite.objects.filter(
+            entity_type="module",
+            entity_identifier=pk,
+            project_id=project_id,
+            workspace__slug=slug,
+        ).delete()
         return Response(status=status.HTTP_204_NO_CONTENT)

     def delete(self, request, slug, project_id, pk):

@@ -18,7 +18,7 @@ from plane.app.permissions import ProjectBasePermission
 # Module imports
 from plane.db.models import (
     Cycle,
-    Inbox,
+    Intake,
     IssueUserProperty,
     Module,
     Project,
@@ -285,6 +285,11 @@ class ProjectAPIEndpoint(BaseAPIView):
         current_instance = json.dumps(
             ProjectSerializer(project).data, cls=DjangoJSONEncoder
         )
+
+        intake_view = request.data.get(
+            "inbox_view", request.data.get("intake_view", False)
+        )

         if project.archived_at:
             return Response(
                 {"error": "Archived project cannot be updated"},
@@ -293,25 +298,33 @@ class ProjectAPIEndpoint(BaseAPIView):

         serializer = ProjectSerializer(
             project,
-            data={**request.data},
+            data={
+                **request.data,
+                "intake_view": intake_view,
+            },
             context={"workspace_id": workspace.id},
             partial=True,
         )

         if serializer.is_valid():
             serializer.save()
-            if serializer.data["inbox_view"]:
-                Inbox.objects.get_or_create(
-                    name=f"{project.name} Inbox",
+            if serializer.data["intake_view"]:
+                intake = Intake.objects.filter(
                     project=project,
                     is_default=True,
-                )
+                ).first()
+                if not intake:
+                    Intake.objects.create(
+                        name=f"{project.name} Intake",
+                        project=project,
+                        is_default=True,
+                    )

                 # Create the triage state in Backlog group
                 State.objects.get_or_create(
                     name="Triage",
                     group="triage",
-                    description="Default state for managing all Inbox Issues",
+                    description="Default state for managing all Intake Issues",
                     project_id=pk,
                     color="#ff7700",
                     is_triage=True,
@@ -377,6 +390,10 @@ class ProjectArchiveUnarchiveAPIEndpoint(BaseAPIView):
         project = Project.objects.get(pk=project_id, workspace__slug=slug)
         project.archived_at = timezone.now()
         project.save()
+        UserFavorite.objects.filter(
+            workspace__slug=slug,
+            project=project_id,
+        ).delete()
         return Response(status=status.HTTP_204_NO_CONTENT)

     def delete(self, request, slug, project_id):

@@ -12,3 +12,4 @@ from .project import (
     ProjectMemberPermission,
     ProjectLitePermission,
 )
+from .base import allow_permission, ROLE
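Note the compatibility shim in the project PATCH above: clients may keep sending the legacy `inbox_view` key and it maps onto `intake_view`. A standalone sketch of the key-rename pattern:

def read_renamed_flag(payload: dict) -> bool:
    # Prefer the legacy key if an old client sent it, fall back to the
    # new key, default to False -- old integrations keep working while
    # the API surface migrates from "inbox" to "intake".
    return payload.get("inbox_view", payload.get("intake_view", False))

assert read_renamed_flag({"inbox_view": True}) is True
assert read_renamed_flag({"intake_view": True}) is True
assert read_renamed_flag({}) is False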
apiserver/plane/app/permissions/base.py (new file, 60 lines)
@@ -0,0 +1,60 @@
+from plane.db.models import WorkspaceMember, ProjectMember
+from functools import wraps
+from rest_framework.response import Response
+from rest_framework import status
+
+from enum import Enum
+
+
+class ROLE(Enum):
+    ADMIN = 20
+    MEMBER = 15
+    GUEST = 5
+
+
+def allow_permission(allowed_roles, level="PROJECT", creator=False, model=None):
+    def decorator(view_func):
+        @wraps(view_func)
+        def _wrapped_view(instance, request, *args, **kwargs):
+
+            # Check for creator if required
+            if creator and model:
+                obj = model.objects.filter(
+                    id=kwargs["pk"], created_by=request.user
+                ).exists()
+                if obj:
+                    return view_func(instance, request, *args, **kwargs)
+
+            # Convert allowed_roles to their values if they are enum members
+            allowed_role_values = [
+                role.value if isinstance(role, ROLE) else role
+                for role in allowed_roles
+            ]
+
+            # Check role permissions
+            if level == "WORKSPACE":
+                if WorkspaceMember.objects.filter(
+                    member=request.user,
+                    workspace__slug=kwargs["slug"],
+                    role__in=allowed_role_values,
+                    is_active=True,
+                ).exists():
+                    return view_func(instance, request, *args, **kwargs)
+            else:
+                if ProjectMember.objects.filter(
+                    member=request.user,
+                    workspace__slug=kwargs["slug"],
+                    project_id=kwargs["project_id"],
+                    role__in=allowed_role_values,
+                    is_active=True,
+                ).exists():
+                    return view_func(instance, request, *args, **kwargs)
+
+            # Return permission denied if no conditions are met
+            return Response(
+                {"error": "You don't have the required permissions."},
+                status=status.HTTP_403_FORBIDDEN,
+            )
+
+        return _wrapped_view
+
+    return decorator
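A hedged usage sketch of the new decorator on a DRF view method; the view class and route are hypothetical, while `slug`, `project_id`, and `pk` are the kwarg names the decorator actually reads:

from rest_framework.response import Response
from rest_framework.views import APIView

from plane.app.permissions import ROLE, allow_permission
from plane.db.models import Module

class ModuleNameView(APIView):
    # Admins and members pass the role check; anyone else passes only
    # through the creator escape hatch (creator=True looks up kwargs["pk"]).
    @allow_permission([ROLE.ADMIN, ROLE.MEMBER], creator=True, model=Module)
    def patch(self, request, slug, project_id, pk):
        ...
        return Response({"ok": True})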
@@ -7,7 +7,6 @@ from plane.db.models import ProjectMember, WorkspaceMember
 # Permission Mappings
 Admin = 20
 Member = 15
-Viewer = 10
 Guest = 5


@@ -6,9 +6,8 @@ from plane.db.models import WorkspaceMember


 # Permission Mappings
-Owner = 20
-Admin = 15
-Member = 10
+Admin = 20
+Member = 15
 Guest = 5


@@ -31,7 +30,7 @@ class WorkSpaceBasePermission(BasePermission):
         return WorkspaceMember.objects.filter(
             member=request.user,
             workspace__slug=view.workspace_slug,
-            role__in=[Owner, Admin],
+            role__in=[Admin, Member],
             is_active=True,
         ).exists()

@@ -40,7 +39,7 @@ class WorkSpaceBasePermission(BasePermission):
         return WorkspaceMember.objects.filter(
             member=request.user,
             workspace__slug=view.workspace_slug,
-            role=Owner,
+            role=Admin,
             is_active=True,
         ).exists()

@@ -53,7 +52,7 @@ class WorkspaceOwnerPermission(BasePermission):
         return WorkspaceMember.objects.filter(
             workspace__slug=view.workspace_slug,
             member=request.user,
-            role=Owner,
+            role=Admin,
         ).exists()


@@ -65,7 +64,7 @@ class WorkSpaceAdminPermission(BasePermission):
         return WorkspaceMember.objects.filter(
             member=request.user,
             workspace__slug=view.workspace_slug,
-            role__in=[Owner, Admin],
+            role__in=[Admin, Member],
             is_active=True,
         ).exists()

@@ -86,7 +85,7 @@ class WorkspaceEntityPermission(BasePermission):
         return WorkspaceMember.objects.filter(
             member=request.user,
             workspace__slug=view.workspace_slug,
-            role__in=[Owner, Admin],
+            role__in=[Admin, Member],
             is_active=True,
         ).exists()

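Taken together, the two hunks above rename the role ladder rather than renumbering it: the surviving numeric values stay put while the labels shift down one tier and the 10 tier disappears. A summary map, which is our reading of the diff:

# Old label -> new label at the same numeric value (assumed from the diff):
ROLE_RENAME = {
    20: ("Owner", "Admin"),
    15: ("Admin", "Member"),
    10: ("Member", None),  # the 10 tier (Member/Viewer) is retired
    5: ("Guest", "Guest"),
}
# Checks like role__in=[Owner, Admin] therefore become
# role__in=[Admin, Member]: same integers, new vocabulary.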
@@ -57,7 +57,7 @@ from .issue import (
     IssueFlatSerializer,
     IssueStateSerializer,
     IssueLinkSerializer,
-    IssueInboxSerializer,
+    IssueIntakeSerializer,
     IssueLiteSerializer,
     IssueAttachmentSerializer,
     IssueSubscriberSerializer,
@@ -92,6 +92,7 @@ from .page import (
     SubPageSerializer,
     PageDetailSerializer,
     PageVersionSerializer,
+    PageVersionDetailSerializer,
 )

 from .estimate import (
@@ -101,12 +102,12 @@ from .estimate import (
     WorkspaceEstimateSerializer,
 )

-from .inbox import (
-    InboxSerializer,
-    InboxIssueSerializer,
-    IssueStateInboxSerializer,
-    InboxIssueLiteSerializer,
-    InboxIssueDetailSerializer,
+from .intake import (
+    IntakeSerializer,
+    IntakeIssueSerializer,
+    IssueStateIntakeSerializer,
+    IntakeIssueLiteSerializer,
+    IntakeIssueDetailSerializer,
 )

 from .analytic import AnalyticViewSerializer
@@ -123,3 +124,9 @@ from .webhook import WebhookSerializer, WebhookLogSerializer
 from .dashboard import DashboardSerializer, WidgetSerializer

 from .favorite import UserFavoriteSerializer
+
+from .draft import (
+    DraftIssueCreateSerializer,
+    DraftIssueSerializer,
+    DraftIssueDetailSerializer,
+)

@@ -60,10 +60,10 @@ class DynamicBaseSerializer(BaseSerializer):
             CycleIssueSerializer,
             IssueLiteSerializer,
             IssueRelationSerializer,
-            InboxIssueLiteSerializer,
+            IntakeIssueLiteSerializer,
             IssueReactionLiteSerializer,
             IssueAttachmentLiteSerializer,
             IssueLinkLiteSerializer,
             RelatedIssueSerializer,
         )

         # Expansion mapper
@@ -84,13 +84,14 @@ class DynamicBaseSerializer(BaseSerializer):
             "issue_cycle": CycleIssueSerializer,
             "parent": IssueLiteSerializer,
             "issue_relation": IssueRelationSerializer,
-            "issue_inbox": InboxIssueLiteSerializer,
+            "issue_intake": IntakeIssueLiteSerializer,
             "issue_related": RelatedIssueSerializer,
             "issue_reactions": IssueReactionLiteSerializer,
             "issue_attachment": IssueAttachmentLiteSerializer,
             "issue_link": IssueLinkLiteSerializer,
             "sub_issues": IssueLiteSerializer,
         }

         if field not in self.fields and field in expansion:
             self.fields[field] = expansion[field](
                 many=(
                     True
@@ -101,11 +102,12 @@ class DynamicBaseSerializer(BaseSerializer):
                     "labels",
                     "issue_cycle",
                     "issue_relation",
-                    "issue_inbox",
+                    "issue_intake",
                     "issue_reactions",
                     "issue_attachment",
                     "issue_link",
                     "sub_issues",
+                    "issue_related",
                 ]
                 else False
             )
@@ -130,11 +132,12 @@ class DynamicBaseSerializer(BaseSerializer):
             LabelSerializer,
             CycleIssueSerializer,
             IssueRelationSerializer,
-            InboxIssueLiteSerializer,
+            IntakeIssueLiteSerializer,
             IssueLiteSerializer,
             IssueReactionLiteSerializer,
             IssueAttachmentLiteSerializer,
             IssueLinkLiteSerializer,
             RelatedIssueSerializer,
         )

         # Expansion mapper
@@ -155,7 +158,8 @@ class DynamicBaseSerializer(BaseSerializer):
             "issue_cycle": CycleIssueSerializer,
             "parent": IssueLiteSerializer,
             "issue_relation": IssueRelationSerializer,
-            "issue_inbox": InboxIssueLiteSerializer,
+            "issue_intake": IntakeIssueLiteSerializer,
             "issue_related": RelatedIssueSerializer,
             "issue_reactions": IssueReactionLiteSerializer,
             "issue_attachment": IssueAttachmentLiteSerializer,
             "issue_link": IssueLinkLiteSerializer,
@@ -178,4 +182,29 @@ class DynamicBaseSerializer(BaseSerializer):
                     instance, f"{expand}_id", None
                 )

+        # Check if issue_attachments is in fields or expand
+        if (
+            "issue_attachments" in self.fields
+            or "issue_attachments" in self.expand
+        ):
+            # Import the model here to avoid circular imports
+            from plane.db.models import FileAsset
+
+            issue_id = getattr(instance, "id", None)
+
+            if issue_id:
+                # Fetch related issue_attachments
+                issue_attachments = FileAsset.objects.filter(
+                    issue_id=issue_id,
+                    entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
+                )
+                # Serialize issue_attachments and add them to the response
+                response["issue_attachments"] = (
+                    IssueAttachmentLiteSerializer(
+                        issue_attachments, many=True
+                    ).data
+                )
+            else:
+                response["issue_attachments"] = []
+
         return response

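For orientation, `DynamicBaseSerializer`'s expansion mapper lets callers opt into nested payloads per request; this diff renames the `issue_inbox` key to `issue_intake` and bolts on eager `issue_attachments` loading. A hypothetical call, assuming the serializer in question inherits `DynamicBaseSerializer` and therefore accepts the `fields`/`expand` kwargs seen elsewhere in this diff:

data = IssueSerializer(
    issue,
    fields=["id", "name", "issue_intake"],
    expand=["issue_intake", "issue_attachments"],
).data
# data["issue_intake"]      -> nested IntakeIssueLiteSerializer dicts
# data["issue_attachments"] -> IssueAttachmentLiteSerializer dicts, fetched
#                              from FileAsset in to_representation above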
apiserver/plane/app/serializers/draft.py (new file, 292 lines)
@@ -0,0 +1,292 @@
+# Django imports
+from django.utils import timezone
+
+# Third Party imports
+from rest_framework import serializers
+
+# Module imports
+from .base import BaseSerializer
+from plane.db.models import (
+    User,
+    Issue,
+    Label,
+    State,
+    DraftIssue,
+    DraftIssueAssignee,
+    DraftIssueLabel,
+    DraftIssueCycle,
+    DraftIssueModule,
+)
+
+
+class DraftIssueCreateSerializer(BaseSerializer):
+    # ids
+    state_id = serializers.PrimaryKeyRelatedField(
+        source="state",
+        queryset=State.objects.all(),
+        required=False,
+        allow_null=True,
+    )
+    parent_id = serializers.PrimaryKeyRelatedField(
+        source="parent",
+        queryset=Issue.objects.all(),
+        required=False,
+        allow_null=True,
+    )
+    label_ids = serializers.ListField(
+        child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
+        write_only=True,
+        required=False,
+    )
+    assignee_ids = serializers.ListField(
+        child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
+        write_only=True,
+        required=False,
+    )
+
+    class Meta:
+        model = DraftIssue
+        fields = "__all__"
+        read_only_fields = [
+            "workspace",
+            "created_by",
+            "updated_by",
+            "created_at",
+            "updated_at",
+        ]
+
+    def to_representation(self, instance):
+        data = super().to_representation(instance)
+        assignee_ids = self.initial_data.get("assignee_ids")
+        data["assignee_ids"] = assignee_ids if assignee_ids else []
+        label_ids = self.initial_data.get("label_ids")
+        data["label_ids"] = label_ids if label_ids else []
+        return data
+
+    def validate(self, data):
+        if (
+            data.get("start_date", None) is not None
+            and data.get("target_date", None) is not None
+            and data.get("start_date", None) > data.get("target_date", None)
+        ):
+            raise serializers.ValidationError(
+                "Start date cannot exceed target date"
+            )
+        return data
+
+    def create(self, validated_data):
+        assignees = validated_data.pop("assignee_ids", None)
+        labels = validated_data.pop("label_ids", None)
+        modules = validated_data.pop("module_ids", None)
+        cycle_id = self.initial_data.get("cycle_id", None)
+        modules = self.initial_data.get("module_ids", None)
+
+        workspace_id = self.context["workspace_id"]
+        project_id = self.context["project_id"]
+
+        # Create Issue
+        issue = DraftIssue.objects.create(
+            **validated_data,
+            workspace_id=workspace_id,
+            project_id=project_id,
+        )
+
+        # Issue Audit Users
+        created_by_id = issue.created_by_id
+        updated_by_id = issue.updated_by_id
+
+        if assignees is not None and len(assignees):
+            DraftIssueAssignee.objects.bulk_create(
+                [
+                    DraftIssueAssignee(
+                        assignee=user,
+                        draft_issue=issue,
+                        workspace_id=workspace_id,
+                        project_id=project_id,
+                        created_by_id=created_by_id,
+                        updated_by_id=updated_by_id,
+                    )
+                    for user in assignees
+                ],
+                batch_size=10,
+            )
+
+        if labels is not None and len(labels):
+            DraftIssueLabel.objects.bulk_create(
+                [
+                    DraftIssueLabel(
+                        label=label,
+                        draft_issue=issue,
+                        project_id=project_id,
+                        workspace_id=workspace_id,
+                        created_by_id=created_by_id,
+                        updated_by_id=updated_by_id,
+                    )
+                    for label in labels
+                ],
+                batch_size=10,
+            )
+
+        if cycle_id is not None:
+            DraftIssueCycle.objects.create(
+                cycle_id=cycle_id,
+                draft_issue=issue,
+                project_id=project_id,
+                workspace_id=workspace_id,
+                created_by_id=created_by_id,
+                updated_by_id=updated_by_id,
+            )
+
+        if modules is not None and len(modules):
+            DraftIssueModule.objects.bulk_create(
+                [
+                    DraftIssueModule(
+                        module_id=module_id,
+                        draft_issue=issue,
+                        project_id=project_id,
+                        workspace_id=workspace_id,
+                        created_by_id=created_by_id,
+                        updated_by_id=updated_by_id,
+                    )
+                    for module_id in modules
+                ],
+                batch_size=10,
+            )
+
+        return issue
+
+    def update(self, instance, validated_data):
+        assignees = validated_data.pop("assignee_ids", None)
+        labels = validated_data.pop("label_ids", None)
+        cycle_id = self.context.get("cycle_id", None)
+        modules = self.initial_data.get("module_ids", None)
+
+        # Related models
+        workspace_id = instance.workspace_id
+        project_id = instance.project_id
+
+        created_by_id = instance.created_by_id
+        updated_by_id = instance.updated_by_id
+
+        if assignees is not None:
+            DraftIssueAssignee.objects.filter(draft_issue=instance).delete()
+            DraftIssueAssignee.objects.bulk_create(
+                [
+                    DraftIssueAssignee(
+                        assignee=user,
+                        draft_issue=instance,
+                        workspace_id=workspace_id,
+                        project_id=project_id,
+                        created_by_id=created_by_id,
+                        updated_by_id=updated_by_id,
+                    )
+                    for user in assignees
+                ],
+                batch_size=10,
+            )
+
+        if labels is not None:
+            DraftIssueLabel.objects.filter(draft_issue=instance).delete()
+            DraftIssueLabel.objects.bulk_create(
+                [
+                    DraftIssueLabel(
+                        label=label,
+                        draft_issue=instance,
+                        workspace_id=workspace_id,
+                        project_id=project_id,
+                        created_by_id=created_by_id,
+                        updated_by_id=updated_by_id,
+                    )
+                    for label in labels
+                ],
+                batch_size=10,
+            )
+
+        if cycle_id != "not_provided":
+            DraftIssueCycle.objects.filter(draft_issue=instance).delete()
+            if cycle_id:
+                DraftIssueCycle.objects.create(
+                    cycle_id=cycle_id,
+                    draft_issue=instance,
+                    workspace_id=workspace_id,
+                    project_id=project_id,
+                    created_by_id=created_by_id,
+                    updated_by_id=updated_by_id,
+                )
+
+        if modules is not None:
+            DraftIssueModule.objects.filter(draft_issue=instance).delete()
+            DraftIssueModule.objects.bulk_create(
+                [
+                    DraftIssueModule(
+                        module_id=module_id,
+                        draft_issue=instance,
+                        workspace_id=workspace_id,
+                        project_id=project_id,
+                        created_by_id=created_by_id,
+                        updated_by_id=updated_by_id,
+                    )
+                    for module_id in modules
+                ],
+                batch_size=10,
+            )
+
+        # Time updation occurs even when other related models are updated
+        instance.updated_at = timezone.now()
+        return super().update(instance, validated_data)
+
+
+class DraftIssueSerializer(BaseSerializer):
+    # ids
+    cycle_id = serializers.PrimaryKeyRelatedField(read_only=True)
+    module_ids = serializers.ListField(
+        child=serializers.UUIDField(),
+        required=False,
+    )
+
+    # Many to many
+    label_ids = serializers.ListField(
+        child=serializers.UUIDField(),
+        required=False,
+    )
+    assignee_ids = serializers.ListField(
+        child=serializers.UUIDField(),
+        required=False,
+    )
+
+    class Meta:
+        model = DraftIssue
+        fields = [
+            "id",
+            "name",
+            "state_id",
+            "sort_order",
+            "completed_at",
+            "estimate_point",
+            "priority",
+            "start_date",
+            "target_date",
+            "project_id",
+            "parent_id",
+            "cycle_id",
+            "module_ids",
+            "label_ids",
+            "assignee_ids",
+            "created_at",
+            "updated_at",
+            "created_by",
+            "updated_by",
+            "type_id",
+            "description_html",
+        ]
+        read_only_fields = fields
+
+
+class DraftIssueDetailSerializer(DraftIssueSerializer):
+    description_html = serializers.CharField()
+
+    class Meta(DraftIssueSerializer.Meta):
+        fields = DraftIssueSerializer.Meta.fields + [
+            "description_html",
+        ]
+        read_only_fields = fields
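A hedged usage sketch of the new create serializer from a view; the payload values are hypothetical, while the `workspace_id`/`project_id` context keys are the ones `create()` above requires:

serializer = DraftIssueCreateSerializer(
    data={
        "name": "Half-written bug report",
        "assignee_ids": [assignee_uuid],  # hypothetical UUIDs
        "label_ids": [label_uuid],
        "cycle_id": str(cycle_uuid),  # read from initial_data in create()
    },
    context={"workspace_id": workspace_id, "project_id": project_id},
)
if serializer.is_valid(raise_exception=True):
    draft = serializer.save()  # also bulk-creates assignee/label/module rows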
@@ -4,22 +4,22 @@ from rest_framework import serializers
 # Module imports
 from .base import BaseSerializer
 from .issue import (
-    IssueInboxSerializer,
+    IssueIntakeSerializer,
     LabelLiteSerializer,
     IssueDetailSerializer,
 )
 from .project import ProjectLiteSerializer
 from .state import StateLiteSerializer
 from .user import UserLiteSerializer
-from plane.db.models import Inbox, InboxIssue, Issue
+from plane.db.models import Intake, IntakeIssue, Issue


-class InboxSerializer(BaseSerializer):
+class IntakeSerializer(BaseSerializer):
     project_detail = ProjectLiteSerializer(source="project", read_only=True)
     pending_issue_count = serializers.IntegerField(read_only=True)

     class Meta:
-        model = Inbox
+        model = Intake
         fields = "__all__"
         read_only_fields = [
             "project",
@@ -27,11 +27,11 @@ class InboxSerializer(BaseSerializer):
         ]


-class InboxIssueSerializer(BaseSerializer):
-    issue = IssueInboxSerializer(read_only=True)
+class IntakeIssueSerializer(BaseSerializer):
+    issue = IssueIntakeSerializer(read_only=True)

     class Meta:
-        model = InboxIssue
+        model = IntakeIssue
         fields = [
             "id",
             "status",
@@ -53,14 +53,14 @@ class InboxIssueSerializer(BaseSerializer):
         return super().to_representation(instance)


-class InboxIssueDetailSerializer(BaseSerializer):
+class IntakeIssueDetailSerializer(BaseSerializer):
     issue = IssueDetailSerializer(read_only=True)
-    duplicate_issue_detail = IssueInboxSerializer(
+    duplicate_issue_detail = IssueIntakeSerializer(
         read_only=True, source="duplicate_to"
     )

     class Meta:
-        model = InboxIssue
+        model = IntakeIssue
         fields = [
             "id",
             "status",
@@ -85,14 +85,14 @@ class InboxIssueDetailSerializer(BaseSerializer):
         return super().to_representation(instance)


-class InboxIssueLiteSerializer(BaseSerializer):
+class IntakeIssueLiteSerializer(BaseSerializer):
     class Meta:
-        model = InboxIssue
+        model = IntakeIssue
         fields = ["id", "status", "duplicate_to", "snoozed_till", "source"]
         read_only_fields = fields


-class IssueStateInboxSerializer(BaseSerializer):
+class IssueStateIntakeSerializer(BaseSerializer):
     state_detail = StateLiteSerializer(read_only=True, source="state")
     project_detail = ProjectLiteSerializer(read_only=True, source="project")
     label_details = LabelLiteSerializer(
@@ -102,7 +102,7 @@ class IssueStateInboxSerializer(BaseSerializer):
         read_only=True, source="assignees", many=True
     )
     sub_issues_count = serializers.IntegerField(read_only=True)
-    issue_inbox = InboxIssueLiteSerializer(read_only=True, many=True)
+    issue_intake = IntakeIssueLiteSerializer(read_only=True, many=True)

     class Meta:
         model = Issue
@@ -27,7 +27,7 @@ from plane.db.models import (
     Module,
     ModuleIssue,
     IssueLink,
-    IssueAttachment,
+    FileAsset,
     IssueReaction,
     CommentReaction,
     IssueVote,
@@ -95,6 +95,8 @@ class IssueCreateSerializer(BaseSerializer):
         write_only=True,
         required=False,
     )
+    project_id = serializers.UUIDField(source="project.id", read_only=True)
+    workspace_id = serializers.UUIDField(source="workspace.id", read_only=True)

     class Meta:
         model = Issue
@@ -437,17 +439,21 @@ class IssueLinkSerializer(BaseSerializer):
             "issue",
         ]

-    def validate_url(self, value):
-        # Check URL format
-        validate_url = URLValidator()
-        try:
-            validate_url(value)
-        except ValidationError:
-            raise serializers.ValidationError("Invalid URL format.")
+    def to_internal_value(self, data):
+        # Modify the URL before validation by appending http:// if missing
+        url = data.get("url", "")
+        if url and not url.startswith(("http://", "https://")):
+            data["url"] = "http://" + url

-        # Check URL scheme
-        if not value.startswith(("http://", "https://")):
-            raise serializers.ValidationError("Invalid URL scheme.")
+        return super().to_internal_value(data)
+
+    def validate_url(self, value):
+        # Use Django's built-in URLValidator for validation
+        url_validator = URLValidator()
+        try:
+            url_validator(value)
+        except ValidationError:
+            raise serializers.ValidationError({"error": "Invalid URL format."})
+
+        return value

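The link serializer now normalizes before validating: a scheme-less URL gets an `http://` prefix in `to_internal_value`, then `URLValidator` does the real check. A standalone sketch of the two-step pattern:

from django.core.exceptions import ValidationError
from django.core.validators import URLValidator

def normalize_and_validate(url: str) -> str:
    # Step 1: tolerate "example.com/page" style input.
    if url and not url.startswith(("http://", "https://")):
        url = "http://" + url
    # Step 2: strict validation on the normalized value.
    try:
        URLValidator()(url)
    except ValidationError:
        raise ValueError("Invalid URL format.")
    return url

print(normalize_and_validate("plane.so/docs"))  # -> http://plane.so/docs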
@@ -494,8 +500,11 @@ class IssueLinkLiteSerializer(BaseSerializer):


 class IssueAttachmentSerializer(BaseSerializer):
+
+    asset_url = serializers.CharField(read_only=True)
+
     class Meta:
-        model = IssueAttachment
+        model = FileAsset
         fields = "__all__"
         read_only_fields = [
             "created_by",
@@ -510,14 +519,15 @@ class IssueAttachmentSerializer(BaseSerializer):

 class IssueAttachmentLiteSerializer(DynamicBaseSerializer):
     class Meta:
-        model = IssueAttachment
+        model = FileAsset
         fields = [
             "id",
             "asset",
             "attributes",
-            "issue_id",
+            # "issue_id",
             "updated_at",
             "updated_by",
+            "asset_url",
         ]
         read_only_fields = fields

@@ -533,7 +543,7 @@ class IssueReactionSerializer(BaseSerializer):
             "project",
             "issue",
             "actor",
-            "deleted_at"
+            "deleted_at",
         ]


@@ -552,7 +562,13 @@ class CommentReactionSerializer(BaseSerializer):
     class Meta:
         model = CommentReaction
         fields = "__all__"
-        read_only_fields = ["workspace", "project", "comment", "actor", "deleted_at"]
+        read_only_fields = [
+            "workspace",
+            "project",
+            "comment",
+            "actor",
+            "deleted_at",
+        ]


 class IssueVoteSerializer(BaseSerializer):
@@ -629,7 +645,7 @@ class IssueStateSerializer(DynamicBaseSerializer):
         fields = "__all__"


-class IssueInboxSerializer(DynamicBaseSerializer):
+class IssueIntakeSerializer(DynamicBaseSerializer):
     label_ids = serializers.ListField(
         child=serializers.UUIDField(),
         required=False,

@@ -5,6 +5,10 @@ from rest_framework import serializers
 from .base import BaseSerializer, DynamicBaseSerializer
 from .project import ProjectLiteSerializer

+# Django imports
+from django.core.validators import URLValidator
+from django.core.exceptions import ValidationError
+
 from plane.db.models import (
     User,
     Module,
@@ -64,6 +68,16 @@ class ModuleWriteSerializer(BaseSerializer):
         members = validated_data.pop("member_ids", None)
         project = self.context["project"]

+        module_name = validated_data.get("name")
+        if module_name:
+            # Lookup for the module name in the module table for that project
+            if Module.objects.filter(
+                name=module_name, project=project
+            ).exists():
+                raise serializers.ValidationError(
+                    {"error": "Module with this name already exists"}
+                )
+
         module = Module.objects.create(**validated_data, project=project)
         if members is not None:
             ModuleMember.objects.bulk_create(
@@ -86,6 +100,19 @@ class ModuleWriteSerializer(BaseSerializer):

     def update(self, instance, validated_data):
         members = validated_data.pop("member_ids", None)
+        module_name = validated_data.get("name")
+        if module_name:
+            # Lookup for the module name in the module table for that project
+            if (
+                Module.objects.filter(
+                    name=module_name, project=instance.project
+                )
+                .exclude(id=instance.id)
+                .exists()
+            ):
+                raise serializers.ValidationError(
+                    {"error": "Module with this name already exists"}
+                )

         if members is not None:
             ModuleMember.objects.filter(module=instance).delete()
@@ -155,16 +182,48 @@ class ModuleLinkSerializer(BaseSerializer):
             "module",
         ]

-    # Validation if url already exists
+    def to_internal_value(self, data):
+        # Modify the URL before validation by appending http:// if missing
+        url = data.get("url", "")
+        if url and not url.startswith(("http://", "https://")):
+            data["url"] = "http://" + url
+
+        return super().to_internal_value(data)
+
+    def validate_url(self, value):
+        # Use Django's built-in URLValidator for validation
+        url_validator = URLValidator()
+        try:
+            url_validator(value)
+        except ValidationError:
+            raise serializers.ValidationError({"error": "Invalid URL format."})
+
+        return value
+
     def create(self, validated_data):
+        validated_data["url"] = self.validate_url(validated_data.get("url"))
         if ModuleLink.objects.filter(
             url=validated_data.get("url"),
             module_id=validated_data.get("module_id"),
         ).exists():
             raise serializers.ValidationError({"error": "URL already exists."})
         return super().create(validated_data)

     def update(self, instance, validated_data):
+        validated_data["url"] = self.validate_url(validated_data.get("url"))
         if (
             ModuleLink.objects.filter(
                 url=validated_data.get("url"),
                 module_id=instance.module_id,
             )
             .exclude(pk=instance.id)
             .exists()
         ):
             raise serializers.ValidationError(
                 {"error": "URL already exists for this Issue"}
             )
-        return ModuleLink.objects.create(**validated_data)
+
+        return super().update(instance, validated_data)

|
||||
class ModuleSerializer(DynamicBaseSerializer):
|
||||
@@ -229,7 +288,14 @@ class ModuleDetailSerializer(ModuleSerializer):
|
||||
cancelled_estimate_points = serializers.FloatField(read_only=True)
|
||||
|
||||
class Meta(ModuleSerializer.Meta):
|
||||
fields = ModuleSerializer.Meta.fields + ["link_module", "sub_issues", "backlog_estimate_points", "unstarted_estimate_points", "started_estimate_points", "cancelled_estimate_points"]
|
||||
fields = ModuleSerializer.Meta.fields + [
|
||||
"link_module",
|
||||
"sub_issues",
|
||||
"backlog_estimate_points",
|
||||
"unstarted_estimate_points",
|
||||
"started_estimate_points",
|
||||
"cancelled_estimate_points",
|
||||
]
|
||||
|
||||
|
||||
class ModuleUserPropertiesSerializer(BaseSerializer):
|
||||
|
||||
@@ -12,6 +12,7 @@ class NotificationSerializer(BaseSerializer):
|
||||
read_only=True, source="triggered_by"
|
||||
)
|
||||
is_inbox_issue = serializers.BooleanField(read_only=True)
|
||||
is_intake_issue = serializers.BooleanField(read_only=True)
|
||||
is_mentioned_notification = serializers.BooleanField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
|
||||
@@ -167,7 +167,40 @@ class PageLogSerializer(BaseSerializer):
|
||||
class PageVersionSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = PageVersion
|
||||
fields = "__all__"
|
||||
fields = [
|
||||
"id",
|
||||
"workspace",
|
||||
"page",
|
||||
"last_saved_at",
|
||||
"owned_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
]
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"page",
|
||||
]
|
||||
|
||||
|
||||
class PageVersionDetailSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = PageVersion
|
||||
fields = [
|
||||
"id",
|
||||
"workspace",
|
||||
"page",
|
||||
"last_saved_at",
|
||||
"description_binary",
|
||||
"description_html",
|
||||
"description_json",
|
||||
"owned_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
]
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"page",
|
||||
|
||||
@@ -22,6 +22,7 @@ class ProjectSerializer(BaseSerializer):
|
||||
workspace_detail = WorkspaceLiteSerializer(
|
||||
source="workspace", read_only=True
|
||||
)
|
||||
inbox_view = serializers.BooleanField(read_only=True, source="intake_view")
|
||||
|
||||
class Meta:
|
||||
model = Project
|
||||
@@ -95,6 +96,7 @@ class ProjectLiteSerializer(BaseSerializer):
|
||||
"identifier",
|
||||
"name",
|
||||
"cover_image",
|
||||
"cover_image_url",
|
||||
"logo_props",
|
||||
"description",
|
||||
]
|
||||
@@ -117,6 +119,8 @@ class ProjectListSerializer(DynamicBaseSerializer):
|
||||
member_role = serializers.IntegerField(read_only=True)
|
||||
anchor = serializers.CharField(read_only=True)
|
||||
members = serializers.SerializerMethodField()
|
||||
cover_image_url = serializers.CharField(read_only=True)
|
||||
inbox_view = serializers.BooleanField(read_only=True, source="intake_view")
|
||||
|
||||
def get_members(self, obj):
|
||||
project_members = getattr(obj, "members_list", None)
|
||||
@@ -128,6 +132,7 @@ class ProjectListSerializer(DynamicBaseSerializer):
|
||||
"member_id": member.member_id,
|
||||
"member__display_name": member.member.display_name,
|
||||
"member__avatar": member.member.avatar,
|
||||
"member__avatar_url": member.member.avatar_url,
|
||||
}
|
||||
for member in project_members
|
||||
]
|
||||
|
||||
@@ -16,26 +16,39 @@ from .base import BaseSerializer
|
||||
class UserSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = User
|
||||
fields = "__all__"
|
||||
# Exclude password field from the serializer
|
||||
fields = [
|
||||
field.name
|
||||
for field in User._meta.fields
|
||||
if field.name != "password"
|
||||
]
|
||||
# Make all system fields and email read only
|
||||
read_only_fields = [
|
||||
"id",
|
||||
"username",
|
||||
"mobile_number",
|
||||
"email",
|
||||
"token",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"is_superuser",
|
||||
"is_staff",
|
||||
"is_managed",
|
||||
"last_active",
|
||||
"last_login_time",
|
||||
"last_logout_time",
|
||||
"last_login_ip",
|
||||
"last_logout_ip",
|
||||
"last_login_uagent",
|
||||
"token_updated_at",
|
||||
"last_location",
|
||||
"last_login_medium",
|
||||
"created_location",
|
||||
"is_bot",
|
||||
"is_password_autoset",
|
||||
"is_email_verified",
|
||||
"is_active",
|
||||
"token_updated_at",
|
||||
]
|
||||
extra_kwargs = {"password": {"write_only": True}}
|
||||
|
||||
# If the user has already filled first name or last name then he is onboarded
|
||||
def get_is_onboarded(self, obj):
|
||||
@@ -43,12 +56,15 @@ class UserSerializer(BaseSerializer):


class UserMeSerializer(BaseSerializer):

    class Meta:
        model = User
        fields = [
            "id",
            "avatar",
            "cover_image",
            "avatar_url",
            "cover_image_url",
            "date_joined",
            "display_name",
            "email",

@@ -143,6 +159,7 @@ class UserLiteSerializer(BaseSerializer):
            "first_name",
            "last_name",
            "avatar",
            "avatar_url",
            "is_bot",
            "display_name",
        ]

@@ -160,9 +177,11 @@ class UserAdminLiteSerializer(BaseSerializer):
            "first_name",
            "last_name",
            "avatar",
            "avatar_url",
            "is_bot",
            "display_name",
            "email",
            "last_login_medium",
        ]
        read_only_fields = [
            "id",

@@ -208,9 +227,15 @@ class ProfileSerializer(BaseSerializer):
    class Meta:
        model = Profile
        fields = "__all__"
        read_only_fields = [
            "user",
        ]


class AccountSerializer(BaseSerializer):
    class Meta:
        model = Account
        fields = "__all__"
        read_only_fields = [
            "user",
        ]
@@ -40,7 +40,7 @@ class WebhookSerializer(DynamicBaseSerializer):

        for addr in ip_addresses:
            ip = ipaddress.ip_address(addr[4][0])
-           if ip.is_private or ip.is_loopback:
+           if ip.is_loopback:
                raise serializers.ValidationError(
                    {"url": "URL resolves to a blocked IP address."}
                )

@@ -92,7 +92,7 @@ class WebhookSerializer(DynamicBaseSerializer):

        for addr in ip_addresses:
            ip = ipaddress.ip_address(addr[4][0])
-           if ip.is_private or ip.is_loopback:
+           if ip.is_loopback:
                raise serializers.ValidationError(
                    {"url": "URL resolves to a blocked IP address."}
                )
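These two hunks relax the webhook URL guard: previously hosts resolving to private or loopback addresses were both rejected, while after the change only loopback resolution is blocked, so webhooks may now target private-network hosts. A minimal, self-contained sketch of the same validation flow; the `validate_webhook_url` helper and the `getaddrinfo` call are illustrative assumptions, not the serializer's actual surrounding code.

    import ipaddress
    import socket
    from urllib.parse import urlparse

    def validate_webhook_url(url: str) -> None:
        """Reject URLs whose host resolves to a loopback address."""
        host = urlparse(url).hostname
        if host is None:
            raise ValueError("URL has no host")
        # getaddrinfo returns tuples; entry [4][0] is the IP string,
        # which matches the addr[4][0] indexing used in the hunks above.
        for addr in socket.getaddrinfo(host, None):
            ip = ipaddress.ip_address(addr[4][0])
            if ip.is_loopback:  # the hunk dropped the ip.is_private check
                raise ValueError("URL resolves to a blocked IP address.")

    # Example: validate_webhook_url("http://127.0.0.1/hook") raises ValueError.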
@@ -22,6 +22,7 @@ class WorkSpaceSerializer(DynamicBaseSerializer):
    owner = UserLiteSerializer(read_only=True)
    total_members = serializers.IntegerField(read_only=True)
    total_issues = serializers.IntegerField(read_only=True)
    logo_url = serializers.CharField(read_only=True)

    def validate_slug(self, value):
        # Check if the slug is restricted

@@ -39,6 +40,7 @@ class WorkSpaceSerializer(DynamicBaseSerializer):
            "created_at",
            "updated_at",
            "owner",
            "logo_url",
        ]


@@ -63,6 +65,7 @@ class WorkSpaceMemberSerializer(DynamicBaseSerializer):


class WorkspaceMemberMeSerializer(BaseSerializer):
    draft_issue_count = serializers.IntegerField(read_only=True)
    class Meta:
        model = WorkspaceMember
        fields = "__all__"
@@ -5,7 +5,7 @@ from .cycle import urlpatterns as cycle_urls
from .dashboard import urlpatterns as dashboard_urls
from .estimate import urlpatterns as estimate_urls
from .external import urlpatterns as external_urls
-from .inbox import urlpatterns as inbox_urls
+from .intake import urlpatterns as intake_urls
from .issue import urlpatterns as issue_urls
from .module import urlpatterns as module_urls
from .notification import urlpatterns as notification_urls

@@ -25,7 +25,7 @@ urlpatterns = [
    *dashboard_urls,
    *estimate_urls,
    *external_urls,
-   *inbox_urls,
+   *intake_urls,
    *issue_urls,
    *module_urls,
    *notification_urls,
@@ -5,6 +5,13 @@ from plane.app.views import (
    FileAssetEndpoint,
    UserAssetsEndpoint,
    FileAssetViewSet,
    # V2 Endpoints
    WorkspaceFileAssetEndpoint,
    UserAssetsV2Endpoint,
    StaticFileAssetEndpoint,
    AssetRestoreEndpoint,
    ProjectAssetEndpoint,
    ProjectBulkAssetEndpoint,
)

@@ -38,4 +45,49 @@ urlpatterns = [
        ),
        name="file-assets-restore",
    ),
    # V2 Endpoints
    path(
        "assets/v2/workspaces/<str:slug>/",
        WorkspaceFileAssetEndpoint.as_view(),
        name="workspace-file-assets",
    ),
    path(
        "assets/v2/workspaces/<str:slug>/<uuid:asset_id>/",
        WorkspaceFileAssetEndpoint.as_view(),
        name="workspace-file-assets",
    ),
    path(
        "assets/v2/user-assets/",
        UserAssetsV2Endpoint.as_view(),
        name="user-file-assets",
    ),
    path(
        "assets/v2/user-assets/<uuid:asset_id>/",
        UserAssetsV2Endpoint.as_view(),
        name="user-file-assets",
    ),
    path(
        "assets/v2/workspaces/<str:slug>/restore/<uuid:asset_id>/",
        AssetRestoreEndpoint.as_view(),
        name="asset-restore",
    ),
    path(
        "assets/v2/static/<uuid:asset_id>/",
        StaticFileAssetEndpoint.as_view(),
        name="static-file-asset",
    ),
    path(
        "assets/v2/workspaces/<str:slug>/projects/<uuid:project_id>/",
        ProjectAssetEndpoint.as_view(),
        name="bulk-asset-update",
    ),
    path(
        "assets/v2/workspaces/<str:slug>/projects/<uuid:project_id>/<uuid:pk>/",
        ProjectAssetEndpoint.as_view(),
        name="bulk-asset-update",
    ),
    path(
        "assets/v2/workspaces/<str:slug>/projects/<uuid:project_id>/<uuid:entity_id>/bulk/",
        ProjectBulkAssetEndpoint.as_view(),
    ),
]
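Since the v2 asset routes above are named, they can be reversed rather than hard-coded. A small sketch, assuming a Django shell inside this project; the slug, UUID, and resulting path prefix are placeholders (the prefix depends on where these urlpatterns are included):

    from uuid import uuid4
    from django.urls import reverse

    # Detail route for a workspace file asset, as registered above.
    url = reverse(
        "workspace-file-assets",
        kwargs={"slug": "my-workspace", "asset_id": uuid4()},
    )
    # -> roughly "/assets/v2/workspaces/my-workspace/<uuid>/"

With two routes sharing the name "workspace-file-assets", Django picks the pattern whose kwargs match. Note that the final bulk route is registered without a name, so it cannot be reversed this way.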
@@ -6,6 +6,8 @@ from plane.app.views import (
    CycleIssueViewSet,
    CycleDateCheckEndpoint,
    CycleFavoriteViewSet,
    CycleProgressEndpoint,
    CycleAnalyticsEndpoint,
    TransferCycleIssueEndpoint,
    CycleUserPropertiesEndpoint,
    CycleArchiveUnarchiveEndpoint,

@@ -106,4 +108,14 @@ urlpatterns = [
        CycleArchiveUnarchiveEndpoint.as_view(),
        name="cycle-archive-unarchive",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/progress/",
        CycleProgressEndpoint.as_view(),
        name="project-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/analytics/",
        CycleAnalyticsEndpoint.as_view(),
        name="project-cycle",
    ),
]
@@ -1,53 +0,0 @@
from django.urls import path


from plane.app.views import (
    InboxViewSet,
    InboxIssueViewSet,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/",
        InboxViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="inbox",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:pk>/",
        InboxViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="inbox",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/",
        InboxIssueViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="inbox-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/<uuid:issue_id>/",
        InboxIssueViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="inbox-issue",
    ),
]
apiserver/plane/app/urls/intake.py (new file, 95 lines)
@@ -0,0 +1,95 @@
from django.urls import path


from plane.app.views import (
    IntakeViewSet,
    IntakeIssueViewSet,
)


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/intakes/",
        IntakeViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="intake",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/intakes/<uuid:pk>/",
        IntakeViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="intake",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/intake-issues/",
        IntakeIssueViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="intake-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/intake-issues/<uuid:pk>/",
        IntakeIssueViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="intake-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/",
        IntakeViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="inbox",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:pk>/",
        IntakeViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="inbox",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/",
        IntakeIssueViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="inbox-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/<uuid:pk>/",
        IntakeIssueViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="inbox-issue",
    ),
]
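Worth noting: the new intake.py registers each route twice, once under the new /intakes/ and /intake-issues/ paths and once under the legacy /inboxes/ and /inbox-issues/ paths with the old inbox route names, so existing clients keep working while the rename rolls out. A minimal sketch of how one might verify that, assuming Django's test runner inside this project and that these urlpatterns are mounted at the root (adjust the prefix to match the actual include() path):

    from django.test import SimpleTestCase
    from django.urls import resolve

    class IntakeRouteAliasTest(SimpleTestCase):
        def test_legacy_inbox_path_resolves_to_intake_view(self):
            match = resolve(
                "/workspaces/demo/projects/11111111-1111-1111-1111-111111111111/inboxes/"
            )
            # Both the old and new paths should land on IntakeViewSet;
            # DRF's as_view() exposes the viewset class as .cls.
            self.assertEqual(match.func.cls.__name__, "IntakeViewSet")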
@@ -11,7 +11,6 @@ from plane.app.views import (
    IssueActivityEndpoint,
    IssueArchiveViewSet,
    IssueCommentViewSet,
-   IssueDraftViewSet,
    IssueListEndpoint,
    IssueReactionViewSet,
    IssueRelationViewSet,

@@ -19,8 +18,12 @@ from plane.app.views import (
    IssueUserDisplayPropertyEndpoint,
    IssueViewSet,
    LabelViewSet,
    BulkIssueOperationsEndpoint,
    BulkArchiveIssuesEndpoint,
    DeletedIssuesListViewSet,
    IssuePaginatedViewSet,
    IssueDetailEndpoint,
    IssueAttachmentV2Endpoint,
    IssueBulkUpdateDateEndpoint,
)

urlpatterns = [

@@ -39,6 +42,18 @@ urlpatterns = [
        ),
        name="project-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues-detail/",
        IssueDetailEndpoint.as_view(),
        name="project-issue-detail",
    ),
-   # updated v1 paginated issues
+   # updated v2 paginated issues
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/v2/issues/",
        IssuePaginatedViewSet.as_view({"get": "list"}),
        name="project-issues-paginated",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:pk>/",
        IssueViewSet.as_view(

@@ -126,6 +141,18 @@ urlpatterns = [
        IssueAttachmentEndpoint.as_view(),
        name="project-issue-attachments",
    ),
    # V2 Attachments
    path(
        "assets/v2/workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/attachments/",
        IssueAttachmentV2Endpoint.as_view(),
        name="project-issue-attachments",
    ),
    path(
        "assets/v2/workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/attachments/<uuid:pk>/",
        IssueAttachmentV2Endpoint.as_view(),
        name="project-issue-attachments",
    ),
    ## Export Issues
    path(
        "workspaces/<str:slug>/export-issues/",
        ExportIssuesEndpoint.as_view(),

@@ -283,31 +310,14 @@ urlpatterns = [
        name="issue-relation",
    ),
    ## End Issue Relation
-   ## Issue Drafts
    path(
-       "workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/",
-       IssueDraftViewSet.as_view(
-           {
-               "get": "list",
-               "post": "create",
-           }
-       ),
-       name="project-issue-draft",
+       "workspaces/<str:slug>/projects/<uuid:project_id>/deleted-issues/",
+       DeletedIssuesListViewSet.as_view(),
+       name="deleted-issues",
    ),
-   path(
-       "workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/<uuid:pk>/",
-       IssueDraftViewSet.as_view(
-           {
-               "get": "retrieve",
-               "patch": "partial_update",
-               "delete": "destroy",
-           }
-       ),
-       name="project-issue-draft",
-   ),
    path(
-       "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-operation-issues/",
-       BulkIssueOperationsEndpoint.as_view(),
-       name="bulk-operations-issues",
+       "workspaces/<str:slug>/projects/<uuid:project_id>/issue-dates/",
+       IssueBulkUpdateDateEndpoint.as_view(),
+       name="project-issue-dates",
    ),
]
@@ -27,6 +27,7 @@ from plane.app.views import (
    WorkspaceCyclesEndpoint,
    WorkspaceFavoriteEndpoint,
    WorkspaceFavoriteGroupEndpoint,
    WorkspaceDraftIssueViewSet,
)


@@ -254,4 +255,30 @@ urlpatterns = [
        WorkspaceFavoriteGroupEndpoint.as_view(),
        name="workspace-user-favorites-groups",
    ),
    path(
        "workspaces/<str:slug>/draft-issues/",
        WorkspaceDraftIssueViewSet.as_view(
            {
                "get": "list",
                "post": "create",
            }
        ),
        name="workspace-draft-issues",
    ),
    path(
        "workspaces/<str:slug>/draft-issues/<uuid:pk>/",
        WorkspaceDraftIssueViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="workspace-drafts-issues",
    ),
    path(
        "workspaces/<str:slug>/draft-to-issue/<uuid:draft_id>/",
        WorkspaceDraftIssueViewSet.as_view({"post": "create_draft_to_issue"}),
        name="workspace-drafts-issues",
    ),
]
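The last route above maps a POST directly onto a custom viewset method, create_draft_to_issue, rather than a standard CRUD action. A minimal sketch of that DRF pattern, with a hypothetical PromoteViewSet standing in for the real viewset:

    from django.urls import path
    from rest_framework import viewsets
    from rest_framework.response import Response

    class PromoteViewSet(viewsets.ViewSet):
        # Any method name can be wired up through as_view's action map.
        def promote(self, request, draft_id=None):
            return Response({"promoted": str(draft_id)})

    urlpatterns = [
        # {"post": "promote"} binds HTTP POST to the promote() method,
        # just as {"post": "create_draft_to_issue"} does above.
        path(
            "drafts/<uuid:draft_id>/promote/",
            PromoteViewSet.as_view({"post": "promote"}),
        ),
    ]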
@@ -40,6 +40,8 @@ from .workspace.base import (
    ExportWorkspaceUserActivityEndpoint,
)

from .workspace.draft import WorkspaceDraftIssueViewSet

from .workspace.favorite import (
    WorkspaceFavoriteEndpoint,
    WorkspaceFavoriteGroupEndpoint,

@@ -98,6 +100,8 @@ from .cycle.base import (
    CycleUserPropertiesEndpoint,
    CycleViewSet,
    TransferCycleIssueEndpoint,
    CycleAnalyticsEndpoint,
    CycleProgressEndpoint,
)
from .cycle.issue import (
    CycleIssueViewSet,

@@ -106,12 +110,28 @@ from .cycle.archive import (
    CycleArchiveUnarchiveEndpoint,
)

-from .asset.base import FileAssetEndpoint, UserAssetsEndpoint, FileAssetViewSet
+from .asset.base import (
+    FileAssetEndpoint,
+    UserAssetsEndpoint,
+    FileAssetViewSet,
+)
+from .asset.v2 import (
+    WorkspaceFileAssetEndpoint,
+    UserAssetsV2Endpoint,
+    StaticFileAssetEndpoint,
+    AssetRestoreEndpoint,
+    ProjectAssetEndpoint,
+    ProjectBulkAssetEndpoint,
+)
from .issue.base import (
    IssueListEndpoint,
    IssueViewSet,
    IssueUserDisplayPropertyEndpoint,
    BulkDeleteIssuesEndpoint,
    DeletedIssuesListViewSet,
    IssuePaginatedViewSet,
    IssueDetailEndpoint,
    IssueBulkUpdateDateEndpoint,
)

from .issue.activity import (

@@ -122,6 +142,8 @@ from .issue.archive import IssueArchiveViewSet, BulkArchiveIssuesEndpoint

from .issue.attachment import (
    IssueAttachmentEndpoint,
    # V2
    IssueAttachmentV2Endpoint,
)

from .issue.comment import (

@@ -129,8 +151,6 @@ from .issue.comment import (
    CommentReactionViewSet,
)

-from .issue.draft import IssueDraftViewSet
-
from .issue.label import (
    LabelViewSet,
    BulkCreateIssueLabelsEndpoint,

@@ -156,9 +176,6 @@ from .issue.subscriber import (
    IssueSubscriberViewSet,
)


from .issue.bulk_operations import BulkIssueOperationsEndpoint

from .module.base import (
    ModuleViewSet,
    ModuleLinkViewSet,

@@ -203,7 +220,7 @@ from .estimate.base import (
    EstimatePointEndpoint,
)

-from .inbox.base import InboxViewSet, InboxIssueViewSet
+from .intake.base import IntakeViewSet, IntakeIssueViewSet

from .analytic.base import (
    AnalyticsEndpoint,
@@ -1,28 +1,35 @@
# Django imports
-from django.db.models import Count, F, Sum
+from django.db.models import Count, F, Sum, Q
from django.db.models.functions import ExtractMonth
from django.utils import timezone
from django.db.models.functions import Concat
from django.db.models import Case, When, Value
from django.db import models

# Third party imports
from rest_framework import status
from rest_framework.response import Response

# Module imports
from plane.app.permissions import WorkSpaceAdminPermission
from plane.app.serializers import AnalyticViewSerializer
from plane.app.views.base import BaseAPIView, BaseViewSet
from plane.bgtasks.analytic_plot_export import analytic_export_task
from plane.db.models import AnalyticView, Issue, Workspace
from plane.utils.analytics_plot import build_graph_plot
from plane.utils.issue_filters import issue_filters
from plane.app.permissions import allow_permission, ROLE


class AnalyticsEndpoint(BaseAPIView):
-   permission_classes = [
-       WorkSpaceAdminPermission,
-   ]
+   @allow_permission(
+       [
+           ROLE.ADMIN,
+           ROLE.MEMBER,
+       ],
+       level="WORKSPACE",
+   )
    def get(self, request, slug):
        x_axis = request.GET.get("x_axis", False)
        y_axis = request.GET.get("y_axis", False)

@@ -103,7 +110,10 @@ class AnalyticsEndpoint(BaseAPIView):
        if x_axis in ["labels__id"] or segment in ["labels__id"]:
            label_details = (
                Issue.objects.filter(
-                   workspace__slug=slug, **filters, labels__id__isnull=False
+                   workspace__slug=slug,
+                   **filters,
+                   labels__id__isnull=False,
+                   label_issue__deleted_at__isnull=True,
                )
                .distinct("labels__id")
                .order_by("labels__id")

@@ -114,14 +124,37 @@ class AnalyticsEndpoint(BaseAPIView):
        if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
            assignee_details = (
                Issue.issue_objects.filter(
+                   Q(
+                       Q(assignees__avatar__isnull=False)
+                       | Q(assignees__avatar_asset__isnull=False)
+                   ),
                    workspace__slug=slug,
                    **filters,
-                   assignees__avatar__isnull=False,
                )
                .annotate(
                    assignees__avatar_url=Case(
                        # If `avatar_asset` exists, use it to generate the asset URL
                        When(
                            assignees__avatar_asset__isnull=False,
                            then=Concat(
                                Value("/api/assets/v2/static/"),
                                "assignees__avatar_asset",  # Assuming avatar_asset has an id or relevant field
                                Value("/"),
                            ),
                        ),
                        # If `avatar_asset` is None, fall back to using `avatar` field directly
                        When(
                            assignees__avatar_asset__isnull=True,
                            then="assignees__avatar",
                        ),
                        default=Value(None),
                        output_field=models.CharField(),
                    )
                )
                .order_by("assignees__id")
                .distinct("assignees__id")
                .values(
                    "assignees__avatar",
                    "assignees__avatar_url",
                    "assignees__display_name",
                    "assignees__first_name",
                    "assignees__last_name",

@@ -138,6 +171,7 @@ class AnalyticsEndpoint(BaseAPIView):
                    workspace__slug=slug,
                    **filters,
                    issue_cycle__cycle_id__isnull=False,
                    issue_cycle__deleted_at__isnull=True,
                )
                .distinct("issue_cycle__cycle_id")
                .order_by("issue_cycle__cycle_id")

@@ -156,6 +190,7 @@ class AnalyticsEndpoint(BaseAPIView):
                    workspace__slug=slug,
                    **filters,
                    issue_module__module_id__isnull=False,
                    issue_module__deleted_at__isnull=True,
                )
                .distinct("issue_module__module_id")
                .order_by("issue_module__module_id")

@@ -201,10 +236,14 @@ class AnalyticViewViewset(BaseViewSet):


class SavedAnalyticEndpoint(BaseAPIView):
-   permission_classes = [
-       WorkSpaceAdminPermission,
-   ]
+   @allow_permission(
+       [
+           ROLE.ADMIN,
+           ROLE.MEMBER,
+       ],
+       level="WORKSPACE",
+   )
    def get(self, request, slug, analytic_id):
        analytic_view = AnalyticView.objects.get(
            pk=analytic_id, workspace__slug=slug

@@ -234,10 +273,14 @@ class SavedAnalyticEndpoint(BaseAPIView):


class ExportAnalyticsEndpoint(BaseAPIView):
-   permission_classes = [
-       WorkSpaceAdminPermission,
-   ]
+   @allow_permission(
+       [
+           ROLE.ADMIN,
+           ROLE.MEMBER,
+       ],
+       level="WORKSPACE",
+   )
    def post(self, request, slug):
        x_axis = request.data.get("x_axis", False)
        y_axis = request.data.get("y_axis", False)

@@ -301,10 +344,8 @@ class ExportAnalyticsEndpoint(BaseAPIView):


class DefaultAnalyticsEndpoint(BaseAPIView):
-   permission_classes = [
-       WorkSpaceAdminPermission,
-   ]
+   @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST], level="WORKSPACE")
    def get(self, request, slug):
        filters = issue_filters(request.GET, "GET")
        base_issues = Issue.issue_objects.filter(

@@ -345,7 +386,6 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
        user_details = [
            "created_by__first_name",
            "created_by__last_name",
            "created_by__avatar",
            "created_by__display_name",
            "created_by__id",
        ]

@@ -354,13 +394,32 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
            base_issues.exclude(created_by=None)
            .values(*user_details)
            .annotate(count=Count("id"))
            .annotate(
                created_by__avatar_url=Case(
                    # If `avatar_asset` exists, use it to generate the asset URL
                    When(
                        created_by__avatar_asset__isnull=False,
                        then=Concat(
                            Value("/api/assets/v2/static/"),
                            "created_by__avatar_asset",  # Assuming avatar_asset has an id or relevant field
                            Value("/"),
                        ),
                    ),
                    # If `avatar_asset` is None, fall back to using `avatar` field directly
                    When(
                        created_by__avatar_asset__isnull=True,
                        then="created_by__avatar",
                    ),
                    default=Value(None),
                    output_field=models.CharField(),
                )
            )
            .order_by("-count")[:5]
        )

        user_assignee_details = [
            "assignees__first_name",
            "assignees__last_name",
            "assignees__avatar",
            "assignees__display_name",
            "assignees__id",
        ]

@@ -369,6 +428,26 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
            base_issues.filter(completed_at__isnull=False)
            .exclude(assignees=None)
            .values(*user_assignee_details)
            .annotate(
                assignees__avatar_url=Case(
                    # If `avatar_asset` exists, use it to generate the asset URL
                    When(
                        assignees__avatar_asset__isnull=False,
                        then=Concat(
                            Value("/api/assets/v2/static/"),
                            "assignees__avatar_asset",  # Assuming avatar_asset has an id or relevant field
                            Value("/"),
                        ),
                    ),
                    # If `avatar_asset` is None, fall back to using `avatar` field directly
                    When(
                        assignees__avatar_asset__isnull=True,
                        then="assignees__avatar",
                    ),
                    default=Value(None),
                    output_field=models.CharField(),
                )
            )
            .annotate(count=Count("id"))
            .order_by("-count")[:5]
        )

@@ -377,15 +456,33 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
            base_issues.filter(completed_at__isnull=True)
            .values(*user_assignee_details)
            .annotate(count=Count("id"))
            .annotate(
                assignees__avatar_url=Case(
                    # If `avatar_asset` exists, use it to generate the asset URL
                    When(
                        assignees__avatar_asset__isnull=False,
                        then=Concat(
                            Value("/api/assets/v2/static/"),
                            "assignees__avatar_asset",  # Assuming avatar_asset has an id or relevant field
                            Value("/"),
                        ),
                    ),
                    # If `avatar_asset` is None, fall back to using `avatar` field directly
                    When(
                        assignees__avatar_asset__isnull=True,
                        then="assignees__avatar",
                    ),
                    default=Value(None),
                    output_field=models.CharField(),
                )
            )
            .order_by("-count")
        )

-       open_estimate_sum = open_issues_queryset.aggregate(
-           sum=Sum("point")
-       )["sum"]
-       total_estimate_sum = base_issues.aggregate(sum=Sum("point"))[
+       open_estimate_sum = open_issues_queryset.aggregate(sum=Sum("point"))[
            "sum"
        ]
+       total_estimate_sum = base_issues.aggregate(sum=Sum("point"))["sum"]

        return Response(
            {

@@ -50,7 +50,7 @@ class FileAssetEndpoint(BaseAPIView):
        asset_key = str(workspace_id) + "/" + asset_key
        file_asset = FileAsset.objects.get(asset=asset_key)
        file_asset.is_deleted = True
-       file_asset.save()
+       file_asset.save(update_fields=["is_deleted"])
        return Response(status=status.HTTP_204_NO_CONTENT)


@@ -59,7 +59,7 @@ class FileAssetViewSet(BaseViewSet):
        asset_key = str(workspace_id) + "/" + asset_key
        file_asset = FileAsset.objects.get(asset=asset_key)
        file_asset.is_deleted = False
-       file_asset.save()
+       file_asset.save(update_fields=["is_deleted"])
        return Response(status=status.HTTP_204_NO_CONTENT)


@@ -96,5 +96,5 @@ class UserAssetsEndpoint(BaseAPIView):
            asset=asset_key, created_by=request.user
        )
        file_asset.is_deleted = True
-       file_asset.save()
+       file_asset.save(update_fields=["is_deleted"])
        return Response(status=status.HTTP_204_NO_CONTENT)
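The three asset/base hunks above all make the same change: save() becomes save(update_fields=["is_deleted"]), so Django issues an UPDATE for just that column instead of rewriting every field, which also avoids clobbering concurrent writes to other columns. A minimal sketch of the difference, with a hypothetical Document model inside some installed app's models.py:

    from django.db import models

    class Document(models.Model):
        title = models.CharField(max_length=100)
        is_deleted = models.BooleanField(default=False)

    def soft_delete(doc: Document) -> None:
        doc.is_deleted = True
        # UPDATE app_document SET is_deleted = true WHERE id = ...;
        # only the listed column is written, unlike a bare doc.save(),
        # which writes every field on the row.
        doc.save(update_fields=["is_deleted"])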
apiserver/plane/app/views/asset/v2.py (new file, 826 lines)
@@ -0,0 +1,826 @@
# Python imports
import uuid

# Django imports
from django.conf import settings
from django.http import HttpResponseRedirect
from django.utils import timezone

# Third party imports
from rest_framework import status
from rest_framework.response import Response
from rest_framework.permissions import AllowAny

# Module imports
from ..base import BaseAPIView
from plane.db.models import (
    FileAsset,
    Workspace,
    Project,
    User,
)
from plane.settings.storage import S3Storage
from plane.app.permissions import allow_permission, ROLE
from plane.utils.cache import invalidate_cache_directly
from plane.bgtasks.storage_metadata_task import get_asset_object_metadata


class UserAssetsV2Endpoint(BaseAPIView):
    """This endpoint is used to upload user profile images."""

    def asset_delete(self, asset_id):
        asset = FileAsset.objects.filter(id=asset_id).first()
        if asset is None:
            return
        asset.is_deleted = True
        asset.deleted_at = timezone.now()
        asset.save(update_fields=["is_deleted", "deleted_at"])
        return

    def entity_asset_save(self, asset_id, entity_type, asset, request):
        # User Avatar
        if entity_type == FileAsset.EntityTypeContext.USER_AVATAR:
            user = User.objects.get(id=asset.user_id)
            user.avatar = ""
            # Delete the previous avatar
            if user.avatar_asset_id:
                self.asset_delete(user.avatar_asset_id)
            # Save the new avatar
            user.avatar_asset_id = asset_id
            user.save()
            invalidate_cache_directly(
                path="/api/users/me/",
                url_params=False,
                user=True,
                request=request,
            )
            invalidate_cache_directly(
                path="/api/users/me/settings/",
                url_params=False,
                user=True,
                request=request,
            )
            return
        # User Cover
        if entity_type == FileAsset.EntityTypeContext.USER_COVER:
            user = User.objects.get(id=asset.user_id)
            user.cover_image = None
            # Delete the previous cover image
            if user.cover_image_asset_id:
                self.asset_delete(user.cover_image_asset_id)
            # Save the new cover image
            user.cover_image_asset_id = asset_id
            user.save()
            invalidate_cache_directly(
                path="/api/users/me/",
                url_params=False,
                user=True,
                request=request,
            )
            invalidate_cache_directly(
                path="/api/users/me/settings/",
                url_params=False,
                user=True,
                request=request,
            )
            return
        return

    def entity_asset_delete(self, entity_type, asset, request):
        # User Avatar
        if entity_type == FileAsset.EntityTypeContext.USER_AVATAR:
            user = User.objects.get(id=asset.user_id)
            user.avatar_asset_id = None
            user.save()
            invalidate_cache_directly(
                path="/api/users/me/",
                url_params=False,
                user=True,
                request=request,
            )
            invalidate_cache_directly(
                path="/api/users/me/settings/",
                url_params=False,
                user=True,
                request=request,
            )
            return
        # User Cover
        if entity_type == FileAsset.EntityTypeContext.USER_COVER:
            user = User.objects.get(id=asset.user_id)
            user.cover_image_asset_id = None
            user.save()
            invalidate_cache_directly(
                path="/api/users/me/",
                url_params=False,
                user=True,
                request=request,
            )
            invalidate_cache_directly(
                path="/api/users/me/settings/",
                url_params=False,
                user=True,
                request=request,
            )
            return
        return

    def post(self, request):
        # get the asset key
        name = request.data.get("name")
        type = request.data.get("type", "image/jpeg")
        size = int(request.data.get("size", settings.FILE_SIZE_LIMIT))
        entity_type = request.data.get("entity_type", False)

        # Check if the file size is within the limit
        size_limit = min(size, settings.FILE_SIZE_LIMIT)

        # Check if the entity type is allowed
        if not entity_type or entity_type not in ["USER_AVATAR", "USER_COVER"]:
            return Response(
                {
                    "error": "Invalid entity type.",
                    "status": False,
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Check if the file type is allowed
        allowed_types = [
            "image/jpeg",
            "image/png",
            "image/webp",
            "image/jpg",
        ]
        if type not in allowed_types:
            return Response(
                {
                    "error": "Invalid file type. Only JPEG and PNG files are allowed.",
                    "status": False,
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        # asset key
        asset_key = f"{uuid.uuid4().hex}-{name}"

        # Create a File Asset
        asset = FileAsset.objects.create(
            attributes={
                "name": name,
                "type": type,
                "size": size_limit,
            },
            asset=asset_key,
            size=size_limit,
            user=request.user,
            created_by=request.user,
            entity_type=entity_type,
        )

        # Get the presigned URL
        storage = S3Storage(request=request)
        # Generate a presigned URL to share an S3 object
        presigned_url = storage.generate_presigned_post(
            object_name=asset_key,
            file_type=type,
            file_size=size_limit,
        )
        # Return the presigned URL
        return Response(
            {
                "upload_data": presigned_url,
                "asset_id": str(asset.id),
                "asset_url": asset.asset_url,
            },
            status=status.HTTP_200_OK,
        )

    def patch(self, request, asset_id):
        # get the asset id
        asset = FileAsset.objects.get(id=asset_id, user_id=request.user.id)
        asset.is_uploaded = True
        # get the storage metadata
        if not asset.storage_metadata:
            get_asset_object_metadata.delay(asset_id=str(asset_id))
        # get the entity and save the asset id for the request field
        self.entity_asset_save(
            asset_id=asset_id,
            entity_type=asset.entity_type,
            asset=asset,
            request=request,
        )
        # update the attributes
        asset.attributes = request.data.get("attributes", asset.attributes)
        # save the asset
        asset.save(update_fields=["is_uploaded", "attributes"])
        return Response(status=status.HTTP_204_NO_CONTENT)

    def delete(self, request, asset_id):
        asset = FileAsset.objects.get(id=asset_id, user_id=request.user.id)
        asset.is_deleted = True
        asset.deleted_at = timezone.now()
        # get the entity and save the asset id for the request field
        self.entity_asset_delete(
            entity_type=asset.entity_type, asset=asset, request=request
        )
        asset.save(update_fields=["is_deleted", "deleted_at"])
        return Response(status=status.HTTP_204_NO_CONTENT)


class WorkspaceFileAssetEndpoint(BaseAPIView):
    """This endpoint is used to upload cover images/logos etc for workspace, projects and users."""

    def get_entity_id_field(self, entity_type, entity_id):
        # Workspace Logo
        if entity_type == FileAsset.EntityTypeContext.WORKSPACE_LOGO:
            return {
                "workspace_id": entity_id,
            }
        # Project Cover
        if entity_type == FileAsset.EntityTypeContext.PROJECT_COVER:
            return {
                "project_id": entity_id,
            }
        # User Avatar and Cover
        if entity_type in [
            FileAsset.EntityTypeContext.USER_AVATAR,
            FileAsset.EntityTypeContext.USER_COVER,
        ]:
            return {
                "user_id": entity_id,
            }
        # Issue Attachment and Description
        if entity_type in [
            FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
            FileAsset.EntityTypeContext.ISSUE_DESCRIPTION,
        ]:
            return {
                "issue_id": entity_id,
            }
        # Page Description
        if entity_type == FileAsset.EntityTypeContext.PAGE_DESCRIPTION:
            return {
                "page_id": entity_id,
            }
        # Comment Description
        if entity_type == FileAsset.EntityTypeContext.COMMENT_DESCRIPTION:
            return {
                "comment_id": entity_id,
            }
        return {}

    def asset_delete(self, asset_id):
        asset = FileAsset.objects.filter(id=asset_id).first()
        # Check if the asset exists
        if asset is None:
            return
        # Mark the asset as deleted
        asset.is_deleted = True
        asset.deleted_at = timezone.now()
        asset.save(update_fields=["is_deleted", "deleted_at"])
        return

    def entity_asset_save(self, asset_id, entity_type, asset, request):
        # Workspace Logo
        if entity_type == FileAsset.EntityTypeContext.WORKSPACE_LOGO:
            workspace = Workspace.objects.filter(id=asset.workspace_id).first()
            if workspace is None:
                return
            # Delete the previous logo
            if workspace.logo_asset_id:
                self.asset_delete(workspace.logo_asset_id)
            # Save the new logo
            workspace.logo = ""
            workspace.logo_asset_id = asset_id
            workspace.save()
            invalidate_cache_directly(
                path="/api/workspaces/",
                url_params=False,
                user=False,
                request=request,
            )
            invalidate_cache_directly(
                path="/api/users/me/workspaces/",
                url_params=False,
                user=True,
                request=request,
            )
            invalidate_cache_directly(
                path="/api/instances/",
                url_params=False,
                user=False,
                request=request,
            )
            return
        # Project Cover
        elif entity_type == FileAsset.EntityTypeContext.PROJECT_COVER:
            project = Project.objects.filter(id=asset.project_id).first()
            if project is None:
                return
            # Delete the previous cover image
            if project.cover_image_asset_id:
                self.asset_delete(project.cover_image_asset_id)
            # Save the new cover image
            project.cover_image = ""
            project.cover_image_asset_id = asset_id
            project.save()
            return
        else:
            return

    def entity_asset_delete(self, entity_type, asset, request):
        # Workspace Logo
        if entity_type == FileAsset.EntityTypeContext.WORKSPACE_LOGO:
            workspace = Workspace.objects.get(id=asset.workspace_id)
            if workspace is None:
                return
            workspace.logo_asset_id = None
            workspace.save()
            invalidate_cache_directly(
                path="/api/workspaces/",
                url_params=False,
                user=False,
                request=request,
            )
            invalidate_cache_directly(
                path="/api/users/me/workspaces/",
                url_params=False,
                user=True,
                request=request,
            )
            invalidate_cache_directly(
                path="/api/instances/",
                url_params=False,
                user=False,
                request=request,
            )
            return
        # Project Cover
        elif entity_type == FileAsset.EntityTypeContext.PROJECT_COVER:
            project = Project.objects.filter(id=asset.project_id).first()
            if project is None:
                return
            project.cover_image_asset_id = None
            project.save()
            return
        else:
            return

    def post(self, request, slug):
        name = request.data.get("name")
        type = request.data.get("type", "image/jpeg")
        size = int(request.data.get("size", settings.FILE_SIZE_LIMIT))
        entity_type = request.data.get("entity_type")
        entity_identifier = request.data.get("entity_identifier", False)

        # Check if the entity type is allowed
        if entity_type not in FileAsset.EntityTypeContext.values:
            return Response(
                {
                    "error": "Invalid entity type.",
                    "status": False,
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Check if the file type is allowed
        allowed_types = [
            "image/jpeg",
            "image/png",
            "image/webp",
            "image/jpg",
            "image/gif",
        ]
        if type not in allowed_types:
            return Response(
                {
                    "error": "Invalid file type. Only JPEG and PNG files are allowed.",
                    "status": False,
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Get the size limit
        size_limit = min(settings.FILE_SIZE_LIMIT, size)

        # Get the workspace
        workspace = Workspace.objects.get(slug=slug)

        # asset key
        asset_key = f"{workspace.id}/{uuid.uuid4().hex}-{name}"

        # Create a File Asset
        asset = FileAsset.objects.create(
            attributes={
                "name": name,
                "type": type,
                "size": size_limit,
            },
            asset=asset_key,
            size=size_limit,
            workspace=workspace,
            created_by=request.user,
            entity_type=entity_type,
            **self.get_entity_id_field(
                entity_type=entity_type, entity_id=entity_identifier
            ),
        )

        # Get the presigned URL
        storage = S3Storage(request=request)
        # Generate a presigned URL to share an S3 object
        presigned_url = storage.generate_presigned_post(
            object_name=asset_key,
            file_type=type,
            file_size=size_limit,
        )
        # Return the presigned URL
        return Response(
            {
                "upload_data": presigned_url,
                "asset_id": str(asset.id),
                "asset_url": asset.asset_url,
            },
            status=status.HTTP_200_OK,
        )

    def patch(self, request, slug, asset_id):
        # get the asset id
        asset = FileAsset.objects.get(id=asset_id, workspace__slug=slug)
        asset.is_uploaded = True
        # get the storage metadata
        if not asset.storage_metadata:
            get_asset_object_metadata.delay(asset_id=str(asset_id))
        # get the entity and save the asset id for the request field
        self.entity_asset_save(
            asset_id=asset_id,
            entity_type=asset.entity_type,
            asset=asset,
            request=request,
        )
        # update the attributes
        asset.attributes = request.data.get("attributes", asset.attributes)
        # save the asset
        asset.save(update_fields=["is_uploaded", "attributes"])
        return Response(status=status.HTTP_204_NO_CONTENT)

    def delete(self, request, slug, asset_id):
        asset = FileAsset.objects.get(id=asset_id, workspace__slug=slug)
        asset.is_deleted = True
        asset.deleted_at = timezone.now()
        # get the entity and save the asset id for the request field
        self.entity_asset_delete(
            entity_type=asset.entity_type, asset=asset, request=request
        )
        asset.save(update_fields=["is_deleted", "deleted_at"])
        return Response(status=status.HTTP_204_NO_CONTENT)

    def get(self, request, slug, asset_id):
        # get the asset id
        asset = FileAsset.objects.get(id=asset_id, workspace__slug=slug)

        # Check if the asset is uploaded
        if not asset.is_uploaded:
            return Response(
                {
                    "error": "The requested asset could not be found.",
                },
                status=status.HTTP_404_NOT_FOUND,
            )

        # Get the presigned URL
        storage = S3Storage(request=request)
        # Generate a presigned URL to share an S3 object
        signed_url = storage.generate_presigned_url(
            object_name=asset.asset.name,
        )
        # Redirect to the signed URL
        return HttpResponseRedirect(signed_url)


class StaticFileAssetEndpoint(BaseAPIView):
    """This endpoint is used to get the signed URL for a static asset."""

    permission_classes = [
        AllowAny,
    ]

    def get(self, request, asset_id):
        # get the asset id
        asset = FileAsset.objects.get(id=asset_id)

        # Check if the asset is uploaded
        if not asset.is_uploaded:
            return Response(
                {
                    "error": "The requested asset could not be found.",
                },
                status=status.HTTP_404_NOT_FOUND,
            )

        # Check if the entity type is allowed
        if asset.entity_type not in [
            FileAsset.EntityTypeContext.USER_AVATAR,
            FileAsset.EntityTypeContext.USER_COVER,
            FileAsset.EntityTypeContext.WORKSPACE_LOGO,
            FileAsset.EntityTypeContext.PROJECT_COVER,
        ]:
            return Response(
                {
                    "error": "Invalid entity type.",
                    "status": False,
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Get the presigned URL
        storage = S3Storage(request=request)
        # Generate a presigned URL to share an S3 object
        signed_url = storage.generate_presigned_url(
            object_name=asset.asset.name,
        )
        # Redirect to the signed URL
        return HttpResponseRedirect(signed_url)


class AssetRestoreEndpoint(BaseAPIView):
    """Endpoint to restore deleted assets."""

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST], level="WORKSPACE")
    def post(self, request, slug, asset_id):
        asset = FileAsset.all_objects.get(id=asset_id, workspace__slug=slug)
        asset.is_deleted = False
        asset.deleted_at = None
        asset.save(update_fields=["is_deleted", "deleted_at"])
        return Response(status=status.HTTP_204_NO_CONTENT)


class ProjectAssetEndpoint(BaseAPIView):
    """This endpoint is used to upload cover images/logos etc for workspace, projects and users."""

    def get_entity_id_field(self, entity_type, entity_id):
        if entity_type == FileAsset.EntityTypeContext.WORKSPACE_LOGO:
            return {
                "workspace_id": entity_id,
            }
        if entity_type == FileAsset.EntityTypeContext.PROJECT_COVER:
            return {
                "project_id": entity_id,
            }
        if entity_type in [
            FileAsset.EntityTypeContext.USER_AVATAR,
            FileAsset.EntityTypeContext.USER_COVER,
        ]:
            return {
                "user_id": entity_id,
            }
        if entity_type in [
            FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
            FileAsset.EntityTypeContext.ISSUE_DESCRIPTION,
        ]:
            return {
                "issue_id": entity_id,
            }
        if entity_type == FileAsset.EntityTypeContext.PAGE_DESCRIPTION:
            return {
                "page_id": entity_id,
            }
        if entity_type == FileAsset.EntityTypeContext.COMMENT_DESCRIPTION:
            return {
                "comment_id": entity_id,
            }
        if entity_type == FileAsset.EntityTypeContext.DRAFT_ISSUE_DESCRIPTION:
            return {
                "draft_issue_id": entity_id,
            }
        return {}

    @allow_permission(
        [ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST],
    )
    def post(self, request, slug, project_id):
        name = request.data.get("name")
        type = request.data.get("type", "image/jpeg")
        size = int(request.data.get("size", settings.FILE_SIZE_LIMIT))
        entity_type = request.data.get("entity_type", "")
        entity_identifier = request.data.get("entity_identifier")

        # Check if the entity type is allowed
        if entity_type not in FileAsset.EntityTypeContext.values:
            return Response(
                {
                    "error": "Invalid entity type.",
                    "status": False,
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Check if the file type is allowed
        allowed_types = [
            "image/jpeg",
            "image/png",
            "image/webp",
            "image/jpg",
            "image/gif",
        ]
        if type not in allowed_types:
            return Response(
                {
                    "error": "Invalid file type. Only JPEG and PNG files are allowed.",
                    "status": False,
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Get the size limit
        size_limit = min(settings.FILE_SIZE_LIMIT, size)

        # Get the workspace
        workspace = Workspace.objects.get(slug=slug)

        # asset key
        asset_key = f"{workspace.id}/{uuid.uuid4().hex}-{name}"

        # Create a File Asset
        asset = FileAsset.objects.create(
            attributes={
                "name": name,
                "type": type,
                "size": size_limit,
            },
            asset=asset_key,
            size=size_limit,
            workspace=workspace,
            created_by=request.user,
            entity_type=entity_type,
            project_id=project_id,
            **self.get_entity_id_field(entity_type, entity_identifier),
        )

        # Get the presigned URL
        storage = S3Storage(request=request)
        # Generate a presigned URL to share an S3 object
        presigned_url = storage.generate_presigned_post(
            object_name=asset_key,
            file_type=type,
            file_size=size_limit,
        )
        # Return the presigned URL
        return Response(
            {
                "upload_data": presigned_url,
                "asset_id": str(asset.id),
                "asset_url": asset.asset_url,
            },
            status=status.HTTP_200_OK,
        )

    @allow_permission(
        [ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST],
    )
    def patch(self, request, slug, project_id, pk):
        # get the asset id
        asset = FileAsset.objects.get(
            id=pk,
        )
        asset.is_uploaded = True
        # get the storage metadata
        if not asset.storage_metadata:
            get_asset_object_metadata.delay(asset_id=str(pk))

        # update the attributes
        asset.attributes = request.data.get("attributes", asset.attributes)
        # save the asset
        asset.save(update_fields=["is_uploaded", "attributes"])
        return Response(status=status.HTTP_204_NO_CONTENT)

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
    def delete(self, request, slug, project_id, pk):
        # Get the asset
        asset = FileAsset.objects.get(
            id=pk,
            workspace__slug=slug,
            project_id=project_id,
        )
        # Mark the asset as deleted
        asset.is_deleted = True
        asset.deleted_at = timezone.now()
        # Save the asset
        asset.save(update_fields=["is_deleted", "deleted_at"])
        return Response(status=status.HTTP_204_NO_CONTENT)

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
    def get(self, request, slug, project_id, pk):
        # get the asset id
        asset = FileAsset.objects.get(
            workspace__slug=slug,
            project_id=project_id,
            pk=pk,
        )

        # Check if the asset is uploaded
        if not asset.is_uploaded:
            return Response(
                {
                    "error": "The requested asset could not be found.",
                },
                status=status.HTTP_404_NOT_FOUND,
            )

        # Get the presigned URL
        storage = S3Storage(request=request)
        # Generate a presigned URL to share an S3 object
        signed_url = storage.generate_presigned_url(
            object_name=asset.asset.name,
        )
        # Redirect to the signed URL
        return HttpResponseRedirect(signed_url)


class ProjectBulkAssetEndpoint(BaseAPIView):

    def save_project_cover(self, asset, project_id):
        project = Project.objects.get(id=project_id)
        project.cover_image_asset_id = asset.id
        project.save()

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
    def post(self, request, slug, project_id, entity_id):
        asset_ids = request.data.get("asset_ids", [])

        # Check if the asset ids are provided
        if not asset_ids:
            return Response(
                {
                    "error": "No asset ids provided.",
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        # get the assets
        assets = FileAsset.objects.filter(
            id__in=asset_ids,
            workspace__slug=slug,
        )

        # Get the first asset
        asset = assets.first()

        if not asset:
            return Response(
                {
                    "error": "The requested asset could not be found.",
                },
                status=status.HTTP_404_NOT_FOUND,
            )

        # Attach the assets to the matching entity
        if asset.entity_type == FileAsset.EntityTypeContext.PROJECT_COVER:
            assets.update(
                project_id=project_id,
            )
            [self.save_project_cover(asset, project_id) for asset in assets]

        if asset.entity_type == FileAsset.EntityTypeContext.ISSUE_DESCRIPTION:
            assets.update(
                issue_id=entity_id,
            )

        if (
            asset.entity_type
            == FileAsset.EntityTypeContext.COMMENT_DESCRIPTION
        ):
            assets.update(
                comment_id=entity_id,
            )

        if asset.entity_type == FileAsset.EntityTypeContext.PAGE_DESCRIPTION:
            assets.update(
                page_id=entity_id,
            )

        if (
            asset.entity_type
            == FileAsset.EntityTypeContext.DRAFT_ISSUE_DESCRIPTION
        ):
            assets.update(
                draft_issue_id=entity_id,
            )

        return Response(status=status.HTTP_204_NO_CONTENT)
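Taken together, the v2 endpoints above implement a three-step upload handshake: POST creates the FileAsset row and returns S3 presigned-POST fields, the client uploads the bytes directly to object storage, and PATCH marks the asset uploaded and attaches it to its entity. A rough client-side sketch using requests; the base URL, the auth setup, and the exact shape of `upload_data` (assumed here to be boto3's presigned-POST {"url", "fields"} dict) are assumptions beyond what the code above shows:

    import requests

    API = "https://plane.example.com/api"  # assumed base URL
    session = requests.Session()           # assume auth cookies/headers are set

    # 1. Register the asset and get presigned upload fields.
    meta = {"name": "avatar.png", "type": "image/png", "size": 12345,
            "entity_type": "USER_AVATAR"}
    res = session.post(f"{API}/assets/v2/user-assets/", json=meta).json()

    # 2. Upload the file bytes directly to object storage.
    upload = res["upload_data"]
    with open("avatar.png", "rb") as fp:
        requests.post(upload["url"], data=upload["fields"], files={"file": fp})

    # 3. Tell the API the upload finished so it flips is_uploaded
    #    and wires the asset to the user profile.
    session.patch(f"{API}/assets/v2/user-assets/{res['asset_id']}/")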
@@ -1,6 +1,7 @@
|
||||
# Django imports
|
||||
from django.contrib.postgres.aggregates import ArrayAgg
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from django.db.models import (
|
||||
Case,
|
||||
CharField,
|
||||
@@ -18,13 +19,13 @@ from django.db.models import (
|
||||
Sum,
|
||||
FloatField,
|
||||
)
|
||||
from django.db.models.functions import Coalesce, Cast
|
||||
from django.db.models.functions import Coalesce, Cast, Concat
|
||||
from django.utils import timezone
|
||||
|
||||
# Third party imports
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
from plane.app.permissions import ProjectEntityPermission
|
||||
from plane.app.permissions import allow_permission, ROLE
|
||||
from plane.db.models import Cycle, UserFavorite, Issue, Label, User, Project
|
||||
from plane.utils.analytics_plot import burndown_plot
|
||||
|
||||
@@ -34,10 +35,6 @@ from .. import BaseAPIView
|
||||
|
||||
class CycleArchiveUnarchiveEndpoint(BaseAPIView):
|
||||
|
||||
permission_classes = [
|
||||
ProjectEntityPermission,
|
||||
]
|
||||
|
||||
def get_queryset(self):
|
||||
favorite_subquery = UserFavorite.objects.filter(
|
||||
user=self.request.user,
|
||||
@@ -51,6 +48,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
|
||||
estimate_point__estimate__type="points",
|
||||
state__group="backlog",
|
||||
issue_cycle__cycle_id=OuterRef("pk"),
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
)
|
||||
.values("issue_cycle__cycle_id")
|
||||
.annotate(
|
||||
@@ -65,6 +63,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
|
||||
estimate_point__estimate__type="points",
|
||||
state__group="unstarted",
|
||||
issue_cycle__cycle_id=OuterRef("pk"),
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
)
|
||||
.values("issue_cycle__cycle_id")
|
||||
.annotate(
|
||||
@@ -79,6 +78,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
|
||||
                     estimate_point__estimate__type="points",
                     state__group="started",
                     issue_cycle__cycle_id=OuterRef("pk"),
+                    issue_cycle__deleted_at__isnull=True,
                 )
                 .values("issue_cycle__cycle_id")
                 .annotate(
@@ -93,6 +93,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
                     estimate_point__estimate__type="points",
                     state__group="cancelled",
                     issue_cycle__cycle_id=OuterRef("pk"),
+                    issue_cycle__deleted_at__isnull=True,
                 )
                 .values("issue_cycle__cycle_id")
                 .annotate(
@@ -107,6 +108,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
                     estimate_point__estimate__type="points",
                     state__group="completed",
                     issue_cycle__cycle_id=OuterRef("pk"),
+                    issue_cycle__deleted_at__isnull=True,
                 )
                 .values("issue_cycle__cycle_id")
                 .annotate(
@@ -120,6 +122,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
                 Issue.issue_objects.filter(
                     estimate_point__estimate__type="points",
                     issue_cycle__cycle_id=OuterRef("pk"),
+                    issue_cycle__deleted_at__isnull=True,
                 )
                 .values("issue_cycle__cycle_id")
                 .annotate(
@@ -143,7 +146,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
                 Prefetch(
                     "issue_cycle__issue__assignees",
                     queryset=User.objects.only(
-                        "avatar", "first_name", "id"
+                        "avatar_asset", "first_name", "id"
                     ).distinct(),
                 )
             )
@@ -163,6 +166,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
                     filter=Q(
                         issue_cycle__issue__archived_at__isnull=True,
                         issue_cycle__issue__is_draft=False,
+                        issue_cycle__issue__deleted_at__isnull=True,
                     ),
                 )
             )
@@ -174,6 +178,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
                         issue_cycle__issue__state__group="completed",
                         issue_cycle__issue__archived_at__isnull=True,
                         issue_cycle__issue__is_draft=False,
+                        issue_cycle__issue__deleted_at__isnull=True,
                     ),
                 )
             )
@@ -185,6 +190,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
                         issue_cycle__issue__state__group="cancelled",
                         issue_cycle__issue__archived_at__isnull=True,
                         issue_cycle__issue__is_draft=False,
+                        issue_cycle__issue__deleted_at__isnull=True,
                     ),
                 )
             )
@@ -196,6 +202,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
                         issue_cycle__issue__state__group="started",
                         issue_cycle__issue__archived_at__isnull=True,
                         issue_cycle__issue__is_draft=False,
+                        issue_cycle__issue__deleted_at__isnull=True,
                     ),
                 )
             )
@@ -207,6 +214,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
                         issue_cycle__issue__state__group="unstarted",
                         issue_cycle__issue__archived_at__isnull=True,
                         issue_cycle__issue__is_draft=False,
+                        issue_cycle__issue__deleted_at__isnull=True,
                     ),
                 )
             )
@@ -218,6 +226,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
                         issue_cycle__issue__state__group="backlog",
                         issue_cycle__issue__archived_at__isnull=True,
                         issue_cycle__issue__is_draft=False,
+                        issue_cycle__issue__deleted_at__isnull=True,
                     ),
                 )
             )
@@ -292,6 +301,12 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
             .distinct()
         )

+    @allow_permission(
+        [
+            ROLE.ADMIN,
+            ROLE.MEMBER,
+        ]
+    )
     def get(self, request, slug, project_id, pk=None):
         if pk is None:
             queryset = (
@@ -339,6 +354,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
                     project_id=self.kwargs.get("project_id"),
                     parent__isnull=False,
                     issue_cycle__cycle_id=pk,
+                    issue_cycle__deleted_at__isnull=True,
                 )
                 .order_by()
                 .annotate(count=Func(F("id"), function="Count"))
@@ -393,13 +409,33 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
             assignee_distribution = (
                 Issue.issue_objects.filter(
                     issue_cycle__cycle_id=pk,
+                    issue_cycle__deleted_at__isnull=True,
                     workspace__slug=slug,
                     project_id=project_id,
                 )
                 .annotate(display_name=F("assignees__display_name"))
                 .annotate(assignee_id=F("assignees__id"))
                 .annotate(avatar=F("assignees__avatar"))
                 .values("display_name", "assignee_id", "avatar")
+                .annotate(
+                    avatar_url=Case(
+                        # If `avatar_asset` exists, use it to generate the asset URL
+                        When(
+                            assignees__avatar_asset__isnull=False,
+                            then=Concat(
+                                Value("/api/assets/v2/static/"),
+                                "assignees__avatar_asset",  # Assuming avatar_asset has an id or relevant field
+                                Value("/"),
+                            ),
+                        ),
+                        # If `avatar_asset` is None, fall back to using `avatar` field directly
+                        When(
+                            assignees__avatar_asset__isnull=True,
+                            then="assignees__avatar",
+                        ),
+                        default=Value(None),
+                        output_field=models.CharField(),
+                    )
+                )
+                .values("display_name", "assignee_id", "avatar_url")
                 .annotate(
                     total_estimates=Sum(
                         Cast("estimate_point__value", FloatField())
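Editor's note on the hunk above: it computes `avatar_url` in the database, preferring the new `avatar_asset` file over the legacy `avatar` URL string. A minimal, self-contained sketch of the same `Case`/`When`/`Concat` pattern follows; the `with_avatar_url` helper, its queryset argument, and the explicit `Cast` are illustrative assumptions, not part of this diff:

```python
# Sketch only: the Case/When/Concat idiom from the hunk above, applied to
# an assumed queryset with `avatar` (URL string) and `avatar_asset` (FK)
# fields. Not the exact Plane implementation.
from django.db.models import Case, CharField, F, Value, When
from django.db.models.functions import Cast, Concat

def with_avatar_url(queryset):
    return queryset.annotate(
        avatar_url=Case(
            # Uploaded asset present: build "/api/assets/v2/static/<asset-id>/"
            When(
                avatar_asset__isnull=False,
                then=Concat(
                    Value("/api/assets/v2/static/"),
                    Cast(F("avatar_asset"), output_field=CharField()),
                    Value("/"),
                    output_field=CharField(),
                ),
            ),
            # No asset: fall back to the legacy URL column
            When(avatar_asset__isnull=True, then=F("avatar")),
            default=Value(None),
            output_field=CharField(),
        )
    )
```

Casting the asset id to text keeps `Concat` happy, since Django expects text expressions there and otherwise raises a mixed-types `FieldError`.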
@@ -431,6 +467,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
             label_distribution = (
                 Issue.issue_objects.filter(
                     issue_cycle__cycle_id=pk,
+                    issue_cycle__deleted_at__isnull=True,
                     workspace__slug=slug,
                     project_id=project_id,
                 )
@@ -486,19 +523,39 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
             assignee_distribution = (
                 Issue.issue_objects.filter(
                     issue_cycle__cycle_id=pk,
+                    issue_cycle__deleted_at__isnull=True,
                     workspace__slug=slug,
                     project_id=project_id,
                 )
                 .annotate(first_name=F("assignees__first_name"))
                 .annotate(last_name=F("assignees__last_name"))
                 .annotate(assignee_id=F("assignees__id"))
                 .annotate(avatar=F("assignees__avatar"))
+                .annotate(
+                    avatar_url=Case(
+                        # If `avatar_asset` exists, use it to generate the asset URL
+                        When(
+                            assignees__avatar_asset__isnull=False,
+                            then=Concat(
+                                Value("/api/assets/v2/static/"),
+                                "assignees__avatar_asset",  # Assuming avatar_asset has an id or relevant field
+                                Value("/"),
+                            ),
+                        ),
+                        # If `avatar_asset` is None, fall back to using `avatar` field directly
+                        When(
+                            assignees__avatar_asset__isnull=True,
+                            then="assignees__avatar",
+                        ),
+                        default=Value(None),
+                        output_field=models.CharField(),
+                    )
+                )
                 .annotate(display_name=F("assignees__display_name"))
                 .values(
                     "first_name",
                     "last_name",
                     "assignee_id",
                     "avatar",
+                    "avatar_url",
                     "display_name",
                 )
                 .annotate(
@@ -537,6 +594,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
             label_distribution = (
                 Issue.issue_objects.filter(
                     issue_cycle__cycle_id=pk,
+                    issue_cycle__deleted_at__isnull=True,
                     workspace__slug=slug,
                     project_id=project_id,
                 )
@@ -596,12 +654,13 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
                 status=status.HTTP_200_OK,
             )

+    @allow_permission([ROLE.ADMIN, ROLE.MEMBER])
     def post(self, request, slug, project_id, cycle_id):
         cycle = Cycle.objects.get(
             pk=cycle_id, project_id=project_id, workspace__slug=slug
         )

-        if cycle.end_date >= timezone.now().date():
+        if cycle.end_date >= timezone.now():
             return Response(
                 {"error": "Only completed cycles can be archived"},
                 status=status.HTTP_400_BAD_REQUEST,
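Editor's note: the `-`/`+` pair above drops the `.date()` coercion, which is only valid if `Cycle.end_date` is (or became, elsewhere in this changeset) a `DateTimeField`; comparing a `date` against an aware `datetime` raises `TypeError`. A hedged sketch of the distinction, with the field-type switch stated as an assumption:

```python
# Sketch: why the `.date()` call could be dropped (assumes `end_date`
# switched from DateField to DateTimeField elsewhere in this release,
# and that USE_TZ is on so timezone.now() is aware).
from datetime import datetime

from django.utils import timezone

def cycle_has_ended(end_date) -> bool:
    # Check datetime first: datetime is a subclass of date, so the
    # order of these isinstance checks matters.
    if isinstance(end_date, datetime):
        return end_date < timezone.now()        # aware datetime comparison
    return end_date < timezone.now().date()     # date-to-date comparison
```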
@@ -609,11 +668,18 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):

         cycle.archived_at = timezone.now()
         cycle.save()
+        UserFavorite.objects.filter(
+            entity_type="cycle",
+            entity_identifier=cycle_id,
+            project_id=project_id,
+            workspace__slug=slug,
+        ).delete()
         return Response(
             {"archived_at": str(cycle.archived_at)},
             status=status.HTTP_200_OK,
         )

+    @allow_permission([ROLE.ADMIN, ROLE.MEMBER])
     def delete(self, request, slug, project_id, cycle_id):
         cycle = Cycle.objects.get(
             pk=cycle_id, project_id=project_id, workspace__slug=slug
File diff suppressed because it is too large
@@ -3,12 +3,7 @@ import json

 # Django imports
 from django.core import serializers
-from django.db.models import (
-    F,
-    Func,
-    OuterRef,
-    Q,
-)
+from django.db.models import F, Func, OuterRef, Q, Subquery
 from django.utils import timezone
 from django.utils.decorators import method_decorator
 from django.views.decorators.gzip import gzip_page
@@ -17,21 +12,18 @@ from django.views.decorators.gzip import gzip_page
 from rest_framework import status
 from rest_framework.response import Response

-from plane.app.permissions import (
-    ProjectEntityPermission,
-)
-
 # Module imports
 from .. import BaseViewSet
 from plane.app.serializers import (
     CycleIssueSerializer,
 )
-from plane.bgtasks.issue_activites_task import issue_activity
+from plane.bgtasks.issue_activities_task import issue_activity
 from plane.db.models import (
     Cycle,
     CycleIssue,
     Issue,
-    IssueAttachment,
+    FileAsset,
     IssueLink,
 )
 from plane.utils.grouper import (
@@ -45,6 +37,8 @@ from plane.utils.paginator import (
     GroupedOffsetPaginator,
     SubGroupedOffsetPaginator,
 )
+from plane.app.permissions import allow_permission, ROLE


 class CycleIssueViewSet(BaseViewSet):
@@ -54,10 +48,6 @@ class CycleIssueViewSet(BaseViewSet):
     webhook_event = "cycle_issue"
     bulk = True

-    permission_classes = [
-        ProjectEntityPermission,
-    ]
-
     filterset_fields = [
         "issue__labels__id",
         "issue__assignees__id",
@@ -92,11 +82,20 @@ class CycleIssueViewSet(BaseViewSet):
         )

     @method_decorator(gzip_page)
+    @allow_permission(
+        [
+            ROLE.ADMIN,
+            ROLE.MEMBER,
+        ]
+    )
     def list(self, request, slug, project_id, cycle_id):
         order_by_param = request.GET.get("order_by", "created_at")
         filters = issue_filters(request.query_params, "GET")
         issue_queryset = (
-            Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
+            Issue.issue_objects.filter(
+                issue_cycle__cycle_id=cycle_id,
+                issue_cycle__deleted_at__isnull=True,
+            )
             .filter(project_id=project_id)
             .filter(workspace__slug=slug)
             .filter(**filters)
@@ -108,7 +107,13 @@ class CycleIssueViewSet(BaseViewSet):
                 "issue_cycle__cycle",
             )
             .filter(**filters)
-            .annotate(cycle_id=F("issue_cycle__cycle_id"))
+            .annotate(
+                cycle_id=Subquery(
+                    CycleIssue.objects.filter(
+                        issue=OuterRef("id"), deleted_at__isnull=True
+                    ).values("cycle_id")[:1]
+                )
+            )
             .annotate(
                 link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                 .order_by()
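Editor's note: the replacement above swaps a join-based `F("issue_cycle__cycle_id")` for a correlated subquery, so soft-deleted `CycleIssue` rows no longer leak a stale `cycle_id`. A hedged sketch of the pattern; the helper and its arguments are illustrative, not from the diff:

```python
# Sketch: annotate each Issue with the cycle_id of its one non-deleted
# CycleIssue link. `annotate_cycle_id` is an assumed helper name.
from django.db.models import OuterRef, Subquery

def annotate_cycle_id(issue_qs, cycle_issue_model):
    live_link = cycle_issue_model.objects.filter(
        issue=OuterRef("pk"),        # correlate with the outer Issue row
        deleted_at__isnull=True,     # ignore soft-deleted links
    ).values("cycle_id")[:1]         # scalar subquery: at most one value
    return issue_qs.annotate(cycle_id=Subquery(live_link))
```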
@@ -116,8 +121,9 @@ class CycleIssueViewSet(BaseViewSet):
                 .values("count")
             )
             .annotate(
-                attachment_count=IssueAttachment.objects.filter(
-                    issue=OuterRef("id")
+                attachment_count=FileAsset.objects.filter(
+                    issue_id=OuterRef("id"),
+                    entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
                 )
                 .order_by()
                 .annotate(count=Func(F("id"), function="Count"))
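Editor's note: `attachment_count` above is a correlated aggregate. The `Func(..., function="Count")` spelling (instead of `Count`) keeps Django from injecting a GROUP BY, so the subquery collapses to a single COUNT per outer row; attachments now live in the generic `FileAsset` table, narrowed by `entity_type`. A sketch under those assumptions (the wrapper function is illustrative):

```python
# Sketch of the count-per-issue subquery pattern used above.
from django.db.models import F, Func, IntegerField, OuterRef, Subquery

def annotate_attachment_count(issue_qs, file_asset_model):
    counts = (
        file_asset_model.objects.filter(
            issue_id=OuterRef("id"),
            entity_type=file_asset_model.EntityTypeContext.ISSUE_ATTACHMENT,
        )
        .order_by()  # clear default ordering so no stray GROUP BY appears
        .annotate(count=Func(F("id"), function="Count"))  # plain COUNT(id)
        .values("count")
    )
    return issue_qs.annotate(
        attachment_count=Subquery(counts, output_field=IntegerField())
    )
```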
@@ -190,10 +196,10 @@ class CycleIssueViewSet(BaseViewSet):
                     group_by_field_name=group_by,
                     sub_group_by_field_name=sub_group_by,
                     count_filter=Q(
-                        Q(issue_inbox__status=1)
-                        | Q(issue_inbox__status=-1)
-                        | Q(issue_inbox__status=2)
-                        | Q(issue_inbox__isnull=True),
+                        Q(issue_intake__status=1)
+                        | Q(issue_intake__status=-1)
+                        | Q(issue_intake__status=2)
+                        | Q(issue_intake__isnull=True),
                         archived_at__isnull=True,
                         is_draft=False,
                     ),
@@ -219,10 +225,10 @@ class CycleIssueViewSet(BaseViewSet):
                 ),
                 group_by_field_name=group_by,
                 count_filter=Q(
-                    Q(issue_inbox__status=1)
-                    | Q(issue_inbox__status=-1)
-                    | Q(issue_inbox__status=2)
-                    | Q(issue_inbox__isnull=True),
+                    Q(issue_intake__status=1)
+                    | Q(issue_intake__status=-1)
+                    | Q(issue_intake__status=2)
+                    | Q(issue_intake__isnull=True),
                     archived_at__isnull=True,
                     is_draft=False,
                 ),
@@ -238,6 +244,7 @@ class CycleIssueViewSet(BaseViewSet):
             ),
         )

+    @allow_permission([ROLE.ADMIN, ROLE.MEMBER])
     def create(self, request, slug, project_id, cycle_id):
         issues = request.data.get("issues", [])

@@ -251,10 +258,7 @@ class CycleIssueViewSet(BaseViewSet):
             workspace__slug=slug, project_id=project_id, pk=cycle_id
         )

-        if (
-            cycle.end_date is not None
-            and cycle.end_date < timezone.now().date()
-        ):
+        if cycle.end_date is not None and cycle.end_date < timezone.now():
             return Response(
                 {
                     "error": "The Cycle has already been completed so no new issues can be added"
@@ -333,6 +337,7 @@ class CycleIssueViewSet(BaseViewSet):
         )
         return Response({"message": "success"}, status=status.HTTP_201_CREATED)

+    @allow_permission([ROLE.ADMIN, ROLE.MEMBER])
     def destroy(self, request, slug, project_id, cycle_id, issue_id):
         cycle_issue = CycleIssue.objects.filter(
             issue_id=issue_id,
@@ -36,13 +36,13 @@ from plane.db.models import (
     DashboardWidget,
     Issue,
     IssueActivity,
-    IssueAttachment,
+    FileAsset,
     IssueLink,
     IssueRelation,
     Project,
     ProjectMember,
     User,
     Widget,
     WorkspaceMember,
+    CycleIssue,
 )
 from plane.utils.issue_filters import issue_filters
@@ -51,36 +51,112 @@ from .. import BaseAPIView


 def dashboard_overview_stats(self, request, slug):
-    assigned_issues = Issue.issue_objects.filter(
-        project__project_projectmember__is_active=True,
-        project__project_projectmember__member=request.user,
-        workspace__slug=slug,
-        assignees__in=[request.user],
-    ).count()
+    assigned_issues = (
+        Issue.issue_objects.filter(
+            project__project_projectmember__is_active=True,
+            project__project_projectmember__member=request.user,
+            workspace__slug=slug,
+            assignees__in=[request.user],
+        )
+        .filter(
+            Q(
+                project__project_projectmember__role=5,
+                project__guest_view_all_features=True,
+            )
+            | Q(
+                project__project_projectmember__role=5,
+                project__guest_view_all_features=False,
+                created_by=self.request.user,
+            )
+            |
+            # For other roles (role < 5), show all issues
+            Q(project__project_projectmember__role__gt=5),
+            project__project_projectmember__member=self.request.user,
+            project__project_projectmember__is_active=True,
+        )
+        .count()
+    )

-    pending_issues_count = Issue.issue_objects.filter(
-        ~Q(state__group__in=["completed", "cancelled"]),
-        target_date__lt=timezone.now().date(),
-        project__project_projectmember__is_active=True,
-        project__project_projectmember__member=request.user,
-        workspace__slug=slug,
-        assignees__in=[request.user],
-    ).count()
+    pending_issues_count = (
+        Issue.issue_objects.filter(
+            ~Q(state__group__in=["completed", "cancelled"]),
+            target_date__lt=timezone.now().date(),
+            project__project_projectmember__is_active=True,
+            project__project_projectmember__member=request.user,
+            workspace__slug=slug,
+            assignees__in=[request.user],
+        )
+        .filter(
+            Q(
+                project__project_projectmember__role=5,
+                project__guest_view_all_features=True,
+            )
+            | Q(
+                project__project_projectmember__role=5,
+                project__guest_view_all_features=False,
+                created_by=self.request.user,
+            )
+            |
+            # For other roles (role < 5), show all issues
+            Q(project__project_projectmember__role__gt=5),
+            project__project_projectmember__member=self.request.user,
+            project__project_projectmember__is_active=True,
+        )
+        .count()
+    )

-    created_issues_count = Issue.issue_objects.filter(
-        workspace__slug=slug,
-        project__project_projectmember__is_active=True,
-        project__project_projectmember__member=request.user,
-        created_by_id=request.user.id,
-    ).count()
+    created_issues_count = (
+        Issue.issue_objects.filter(
+            workspace__slug=slug,
+            project__project_projectmember__is_active=True,
+            project__project_projectmember__member=request.user,
+            created_by_id=request.user.id,
+        )
+        .filter(
+            Q(
+                project__project_projectmember__role=5,
+                project__guest_view_all_features=True,
+            )
+            | Q(
+                project__project_projectmember__role=5,
+                project__guest_view_all_features=False,
+                created_by=self.request.user,
+            )
+            |
+            # For other roles (role < 5), show all issues
+            Q(project__project_projectmember__role__gt=5),
+            project__project_projectmember__member=self.request.user,
+            project__project_projectmember__is_active=True,
+        )
+        .count()
+    )

-    completed_issues_count = Issue.issue_objects.filter(
-        workspace__slug=slug,
-        project__project_projectmember__is_active=True,
-        project__project_projectmember__member=request.user,
-        assignees__in=[request.user],
-        state__group="completed",
-    ).count()
+    completed_issues_count = (
+        Issue.issue_objects.filter(
+            workspace__slug=slug,
+            project__project_projectmember__is_active=True,
+            project__project_projectmember__member=request.user,
+            assignees__in=[request.user],
+            state__group="completed",
+        )
+        .filter(
+            Q(
+                project__project_projectmember__role=5,
+                project__guest_view_all_features=True,
+            )
+            | Q(
+                project__project_projectmember__role=5,
+                project__guest_view_all_features=False,
+                created_by=self.request.user,
+            )
+            |
+            # For other roles (role < 5), show all issues
+            Q(project__project_projectmember__role__gt=5),
+            project__project_projectmember__member=self.request.user,
+            project__project_projectmember__is_active=True,
+        )
+        .count()
+    )

     return Response(
         {
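Editor's note: the four counters above repeat one visibility rule: guests (role 5) see everything only when the project enables `guest_view_all_features`, otherwise only what they created, while higher roles see all issues (the inline comment in the diff says "role < 5", though the filter it annotates actually matches `role__gt=5`). A sketch of that rule factored into a reusable Q object; the helper name and `GUEST_ROLE` constant are illustrative assumptions:

```python
# Sketch: the repeated guest-visibility rule as one reusable Q object.
# `guest_visibility_q` is an assumed helper name, not part of this diff.
from django.db.models import Q

GUEST_ROLE = 5  # assumption: Plane encodes the guest role as 5

def guest_visibility_q(user):
    return (
        Q(
            project__project_projectmember__role=GUEST_ROLE,
            project__guest_view_all_features=True,
        )
        | Q(
            project__project_projectmember__role=GUEST_ROLE,
            project__guest_view_all_features=False,
            created_by=user,
        )
        # Any higher role sees every issue in the project
        | Q(project__project_projectmember__role__gt=GUEST_ROLE)
    )

# usage sketch:
# Issue.issue_objects.filter(guest_visibility_q(request.user), ...).count()
```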
@@ -116,7 +192,13 @@ def dashboard_assigned_issues(self, request, slug):
             ).select_related("issue"),
         )
     )
-    .annotate(cycle_id=F("issue_cycle__cycle_id"))
+    .annotate(
+        cycle_id=Subquery(
+            CycleIssue.objects.filter(
+                issue=OuterRef("id"), deleted_at__isnull=True
+            ).values("cycle_id")[:1]
+        )
+    )
     .annotate(
         link_count=IssueLink.objects.filter(issue=OuterRef("id"))
         .order_by()
@@ -124,8 +206,9 @@ def dashboard_assigned_issues(self, request, slug):
         .values("count")
     )
     .annotate(
-        attachment_count=IssueAttachment.objects.filter(
-            issue=OuterRef("id")
+        attachment_count=FileAsset.objects.filter(
+            issue_id=OuterRef("id"),
+            entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
        )
         .order_by()
         .annotate(count=Func(F("id"), function="Count"))
@@ -142,7 +225,10 @@ def dashboard_assigned_issues(self, request, slug):
         ArrayAgg(
             "labels__id",
             distinct=True,
-            filter=~Q(labels__id__isnull=True),
+            filter=Q(
+                ~Q(labels__id__isnull=True)
+                & Q(label_issue__deleted_at__isnull=True),
+            ),
         ),
         Value([], output_field=ArrayField(UUIDField())),
     ),
@@ -150,8 +236,11 @@ def dashboard_assigned_issues(self, request, slug):
         ArrayAgg(
             "assignees__id",
             distinct=True,
-            filter=~Q(assignees__id__isnull=True)
-            & Q(assignees__member_project__is_active=True),
+            filter=Q(
+                ~Q(assignees__id__isnull=True)
+                & Q(assignees__member_project__is_active=True)
+                & Q(issue_assignee__deleted_at__isnull=True)
+            ),
         ),
         Value([], output_field=ArrayField(UUIDField())),
     ),
@@ -159,13 +248,25 @@ def dashboard_assigned_issues(self, request, slug):
         ArrayAgg(
             "issue_module__module_id",
             distinct=True,
-            filter=~Q(issue_module__module_id__isnull=True),
+            filter=Q(
+                ~Q(issue_module__module_id__isnull=True)
+                & Q(issue_module__module__archived_at__isnull=True)
+                & Q(issue_module__deleted_at__isnull=True)
+            ),
         ),
         Value([], output_field=ArrayField(UUIDField())),
     ),
     )
 )

+    if WorkspaceMember.objects.filter(
+        workspace__slug=slug,
+        member=request.user,
+        role=5,
+        is_active=True,
+    ).exists():
+        assigned_issues = assigned_issues.filter(created_by=request.user)
+
     # Priority Ordering
     priority_order = ["urgent", "high", "medium", "low", "none"]
     assigned_issues = assigned_issues.annotate(
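Editor's note: the label/assignee/module aggregations above wrap `ArrayAgg` in `Coalesce(..., Value([]))` so rows with no matches yield an empty list instead of NULL, while the new `filter=Q(...)` clauses exclude soft-deleted through-rows. A self-contained sketch of that idiom (PostgreSQL only; the wrapper function is illustrative, field names mirror the diff):

```python
# Sketch of the Coalesce(ArrayAgg(...)) pattern used above.
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models import Q, UUIDField, Value
from django.db.models.functions import Coalesce

def annotate_label_ids(issue_qs):
    return issue_qs.annotate(
        label_ids=Coalesce(
            ArrayAgg(
                "labels__id",
                distinct=True,
                # Keep only real, non-soft-deleted label links
                filter=Q(
                    ~Q(labels__id__isnull=True)
                    & Q(label_issue__deleted_at__isnull=True)
                ),
            ),
            # No labels at all -> [] instead of NULL
            Value([], output_field=ArrayField(UUIDField())),
        )
    )
```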
@@ -271,7 +372,13 @@ def dashboard_created_issues(self, request, slug):
         .filter(**filters)
         .select_related("workspace", "project", "state", "parent")
         .prefetch_related("assignees", "labels", "issue_module__module")
-        .annotate(cycle_id=F("issue_cycle__cycle_id"))
+        .annotate(
+            cycle_id=Subquery(
+                CycleIssue.objects.filter(
+                    issue=OuterRef("id"), deleted_at__isnull=True
+                ).values("cycle_id")[:1]
+            )
+        )
         .annotate(
             link_count=IssueLink.objects.filter(issue=OuterRef("id"))
             .order_by()
@@ -279,8 +386,9 @@ def dashboard_created_issues(self, request, slug):
             .values("count")
         )
         .annotate(
-            attachment_count=IssueAttachment.objects.filter(
-                issue=OuterRef("id")
+            attachment_count=FileAsset.objects.filter(
+                issue_id=OuterRef("id"),
+                entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
             )
             .order_by()
             .annotate(count=Func(F("id"), function="Count"))
@@ -297,7 +405,10 @@ def dashboard_created_issues(self, request, slug):
             ArrayAgg(
                 "labels__id",
                 distinct=True,
-                filter=~Q(labels__id__isnull=True),
+                filter=Q(
+                    ~Q(labels__id__isnull=True)
+                    & Q(label_issue__deleted_at__isnull=True),
+                ),
             ),
             Value([], output_field=ArrayField(UUIDField())),
         ),
@@ -305,8 +416,11 @@ def dashboard_created_issues(self, request, slug):
             ArrayAgg(
                 "assignees__id",
                 distinct=True,
-                filter=~Q(assignees__id__isnull=True)
-                & Q(assignees__member_project__is_active=True),
+                filter=Q(
+                    ~Q(assignees__id__isnull=True)
+                    & Q(assignees__member_project__is_active=True)
+                    & Q(issue_assignee__deleted_at__isnull=True)
+                ),
             ),
             Value([], output_field=ArrayField(UUIDField())),
         ),
@@ -314,7 +428,11 @@ def dashboard_created_issues(self, request, slug):
             ArrayAgg(
                 "issue_module__module_id",
                 distinct=True,
-                filter=~Q(issue_module__module_id__isnull=True),
+                filter=Q(
+                    ~Q(issue_module__module_id__isnull=True)
+                    & Q(issue_module__module__archived_at__isnull=True)
+                    & Q(issue_module__deleted_at__isnull=True)
+                ),
             ),
             Value([], output_field=ArrayField(UUIDField())),
         ),
@@ -409,6 +527,16 @@ def dashboard_created_issues(self, request, slug):
 def dashboard_issues_by_state_groups(self, request, slug):
     filters = issue_filters(request.query_params, "GET")
     state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
+    extra_filters = {}
+
+    if WorkspaceMember.objects.filter(
+        workspace__slug=slug,
+        member=request.user,
+        role=5,
+        is_active=True,
+    ).exists():
+        extra_filters = {"created_by": request.user}
+
     issues_by_state_groups = (
         Issue.issue_objects.filter(
             workspace__slug=slug,
@@ -416,7 +544,7 @@ def dashboard_issues_by_state_groups(self, request, slug):
             project__project_projectmember__member=request.user,
             assignees__in=[request.user],
         )
-        .filter(**filters)
+        .filter(**filters, **extra_filters)
         .values("state__group")
         .annotate(count=Count("id"))
     )
@@ -439,6 +567,15 @@ def dashboard_issues_by_state_groups(self, request, slug):
 def dashboard_issues_by_priority(self, request, slug):
     filters = issue_filters(request.query_params, "GET")
     priority_order = ["urgent", "high", "medium", "low", "none"]
+    extra_filters = {}
+
+    if WorkspaceMember.objects.filter(
+        workspace__slug=slug,
+        member=request.user,
+        role=5,
+        is_active=True,
+    ).exists():
+        extra_filters = {"created_by": request.user}
+
     issues_by_priority = (
         Issue.issue_objects.filter(
@@ -447,7 +584,7 @@ def dashboard_issues_by_priority(self, request, slug):
             project__project_projectmember__member=request.user,
             assignees__in=[request.user],
         )
-        .filter(**filters)
+        .filter(**filters, **extra_filters)
         .values("priority")
         .annotate(count=Count("id"))
     )
@@ -521,105 +658,42 @@ def dashboard_recent_projects(self, request, slug):


 def dashboard_recent_collaborators(self, request, slug):
-    # Subquery to count activities for each project member
-    activity_count_subquery = (
-        IssueActivity.objects.filter(
-            workspace__slug=slug,
-            actor=OuterRef("member"),
-            project__project_projectmember__member=request.user,
-            project__project_projectmember__is_active=True,
-            project__archived_at__isnull=True,
-        )
-        .values("actor")
-        .annotate(num_activities=Count("pk"))
-        .values("num_activities")
-    )
-
-    # Get all project members and annotate them with activity counts
     project_members_with_activities = (
-        ProjectMember.objects.filter(
+        WorkspaceMember.objects.filter(
             workspace__slug=slug,
-            project__project_projectmember__member=request.user,
-            project__project_projectmember__is_active=True,
-            project__archived_at__isnull=True,
+            is_active=True,
         )
         .annotate(
-            num_activities=Coalesce(
-                Subquery(activity_count_subquery),
-                Value(0),
-                output_field=IntegerField(),
-            ),
-            is_current_user=Case(
-                When(member=request.user, then=Value(0)),
-                default=Value(1),
-                output_field=IntegerField(),
-            ),
+            active_issue_count=Count(
+                Case(
+                    When(
+                        member__issue_assignee__issue__state__group__in=[
+                            "unstarted",
+                            "started",
+                        ],
+                        member__issue_assignee__issue__workspace__slug=slug,
+                        member__issue_assignee__issue__project__project_projectmember__member=request.user,
+                        member__issue_assignee__issue__project__project_projectmember__is_active=True,
+                        then=F("member__issue_assignee__issue__id"),
+                    ),
+                    distinct=True,
+                    output_field=IntegerField(),
+                ),
+                distinct=True,
+            ),
+            user_id=F("member_id"),
         )
-        .values_list("member", flat=True)
-        .order_by("is_current_user", "-num_activities")
+        .values("user_id", "active_issue_count")
+        .order_by("-active_issue_count")
         .distinct()
     )
     search = request.query_params.get("search", None)
     if search:
         project_members_with_activities = (
             project_members_with_activities.filter(
                 Q(member__display_name__icontains=search)
                 | Q(member__first_name__icontains=search)
                 | Q(member__last_name__icontains=search)
             )
         )

-    return self.paginate(
-        request=request,
-        queryset=project_members_with_activities,
-        controller=lambda qs: self.get_results_controller(qs, slug),
+    return Response(
+        (project_members_with_activities),
+        status=status.HTTP_200_OK,
     )


 class DashboardEndpoint(BaseAPIView):
-    def get_results_controller(self, project_members_with_activities, slug):
-        user_active_issue_counts = (
-            User.objects.filter(
-                id__in=project_members_with_activities,
-            )
-            .annotate(
-                active_issue_count=Count(
-                    Case(
-                        When(
-                            issue_assignee__issue__state__group__in=[
-                                "unstarted",
-                                "started",
-                            ],
-                            issue_assignee__issue__workspace__slug=slug,
-                            issue_assignee__issue__project__project_projectmember__is_active=True,
-                            then=F("issue_assignee__issue__id"),
-                        ),
-                        output_field=IntegerField(),
-                    ),
-                    distinct=True,
-                )
-            )
-            .values("active_issue_count", user_id=F("id"))
-        )
-        # Create a dictionary to store the active issue counts by user ID
-        active_issue_counts_dict = {
-            user["user_id"]: user["active_issue_count"]
-            for user in user_active_issue_counts
-        }
-
-        # Preserve the sequence of project members with activities
-        paginated_results = [
-            {
-                "user_id": member_id,
-                "active_issue_count": active_issue_counts_dict.get(
-                    member_id, 0
-                ),
-            }
-            for member_id in project_members_with_activities
-        ]
-        return paginated_results
-
     def create(self, request, slug):
         serializer = DashboardSerializer(data=request.data)
         if serializer.is_valid():
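Editor's note: the final hunk above replaces activity-based ranking and the paginated `get_results_controller` round-trip with a single annotated query ordered by open assignments. A compact sketch of the conditional distinct count it relies on; the wrapper function is illustrative and the field paths are taken from the diff:

```python
# Sketch: rank members by how many distinct open issues they hold.
from django.db.models import Case, Count, F, IntegerField, When

def rank_by_open_assignments(member_qs):
    return member_qs.annotate(
        active_issue_count=Count(
            Case(
                # Emit the issue id only for open work; other rows
                # yield NULL, which Count ignores.
                When(
                    member__issue_assignee__issue__state__group__in=[
                        "unstarted",
                        "started",
                    ],
                    then=F("member__issue_assignee__issue__id"),
                ),
                output_field=IntegerField(),
            ),
            distinct=True,  # each issue counted once despite join fan-out
        )
    ).order_by("-active_issue_count")
```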
Some files were not shown because too many files have changed in this diff