forked from github/plane
Compare commits: v0.8-dev...feat/csv_e (394 commits)
.env.example (17 changes)

@@ -9,16 +9,20 @@ NEXT_PUBLIC_GITHUB_ID=""
NEXT_PUBLIC_GITHUB_APP_NAME=""
# Sentry DSN for error monitoring
NEXT_PUBLIC_SENTRY_DSN=""
# Enable/Disable OAUTH - default 0 for selfhosted instance
NEXT_PUBLIC_ENABLE_OAUTH=0
# Enable/Disable sentry
NEXT_PUBLIC_ENABLE_SENTRY=0
# Enable/Disable session recording
NEXT_PUBLIC_ENABLE_SESSION_RECORDER=0
# Enable/Disable event tracking
NEXT_PUBLIC_TRACK_EVENTS=0
# Slack for Slack Integration
NEXT_PUBLIC_SLACK_CLIENT_ID=""
# For Telemetry, set it to "app.plane.so"
NEXT_PUBLIC_PLAUSIBLE_DOMAIN=""
# public boards deploy url
NEXT_PUBLIC_DEPLOY_URL=""

# Backend
# Debug value for api server use it as 0 for production use
@@ -59,15 +63,16 @@ AWS_S3_BUCKET_NAME="uploads"
FILE_SIZE_LIMIT=5242880

# GPT settings
OPENAI_API_KEY=""
GPT_ENGINE=""
OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
OPENAI_API_KEY="sk-" # add your openai key here
GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access

# Github
GITHUB_CLIENT_SECRET="" # For fetching release notes

# Settings related to Docker
DOCKERIZED=1
# set to 1 If using the pre-configured minio setup
USE_MINIO=1

# Nginx Configuration
@@ -79,4 +84,4 @@ DEFAULT_PASSWORD="password123"

# SignUps
ENABLE_SIGNUP="1"
# Auto generated and Required that will be generated from setup.sh

.gitignore (vendored, 4 changes)

@@ -70,4 +70,6 @@ package-lock.json
# lock files
package-lock.json
pnpm-lock.yaml
pnpm-workspace.yaml

.npmrc

@@ -23,7 +23,6 @@ You can open a new issue with this [issue form](https://github.com/makeplane/pla
- Python version 3.8+
- Postgres version v14
- Redis version v6.2.7
- pnpm version 7.22.0

### Setup the project

Dockerfile (13 changes)

@@ -5,9 +5,11 @@ WORKDIR /app
ENV NEXT_PUBLIC_API_BASE_URL=http://NEXT_PUBLIC_API_BASE_URL_PLACEHOLDER

RUN yarn global add turbo
RUN apk add tree
COPY . .

RUN turbo prune --scope=app --docker
RUN turbo prune --scope=app --scope=plane-deploy --docker
CMD tree -I node_modules/

# Add lockfile and package.json's of isolated subworkspace
FROM node:18-alpine AS installer
@@ -21,14 +23,14 @@ COPY --from=builder /app/out/json/ .
COPY --from=builder /app/out/yarn.lock ./yarn.lock
RUN yarn install

# Build the project
# # Build the project
COPY --from=builder /app/out/full/ .
COPY turbo.json turbo.json
COPY replace-env-vars.sh /usr/local/bin/
USER root
RUN chmod +x /usr/local/bin/replace-env-vars.sh

RUN yarn turbo run build --filter=app
RUN yarn turbo run build

ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
    BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
@@ -96,11 +98,16 @@ RUN adduser --system --uid 1001 captain

COPY --from=installer /app/apps/app/next.config.js .
COPY --from=installer /app/apps/app/package.json .
COPY --from=installer /app/apps/space/next.config.js .
COPY --from=installer /app/apps/space/package.json .

COPY --from=installer --chown=captain:plane /app/apps/app/.next/standalone ./

COPY --from=installer --chown=captain:plane /app/apps/app/.next/static ./apps/app/.next/static

COPY --from=installer --chown=captain:plane /app/apps/space/.next/standalone ./
COPY --from=installer --chown=captain:plane /app/apps/space/.next ./apps/space/.next

ENV NEXT_TELEMETRY_DISABLED 1

# RUN rm /etc/nginx/conf.d/default.conf

README.md (36 changes)

@@ -2,7 +2,7 @@

<p align="center">
  <a href="https://plane.so">
    <img src="https://res.cloudinary.com/toolspacedev/image/upload/v1680596414/Plane/Plane_Icon_Blue_on_White_150x150_muysa3.jpg" alt="Plane Logo" width="70">
    <img src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_logo_.webp" alt="Plane Logo" width="70">
  </a>
</p>

@@ -11,22 +11,22 @@

<p align="center">
  <a href="https://discord.com/invite/A92xrEGCge">
    <img alt="Discord" src="https://img.shields.io/discord/1031547764020084846?color=5865F2&label=Discord&style=for-the-badge" />
    <img alt="Discord online members" src="https://img.shields.io/discord/1031547764020084846?color=5865F2&label=Discord&style=for-the-badge" />
  </a>
  <img alt="Discord" src="https://img.shields.io/github/commit-activity/m/makeplane/plane?style=for-the-badge" />
  <img alt="Commit activity per month" src="https://img.shields.io/github/commit-activity/m/makeplane/plane?style=for-the-badge" />
</p>

<p>
  <a href="https://app.plane.so/#gh-light-mode-only" target="_blank">
    <img
      src="https://ik.imagekit.io/killbluedog/Plane_Screen.png?updatedAt=1684942001069"
      src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_screen.webp"
      alt="Plane Screens"
      width="100%"
    />
  </a>
  <a href="https://app.plane.so/#gh-dark-mode-only" target="_blank">
    <img
      src="https://ik.imagekit.io/killbluedog/Plane_Screens_Dark_Mode.png?updatedAt=1684942388044"
      src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_screens_dark_mode.webp"
      alt="Plane Screens"
      width="100%"
    />
@@ -61,14 +61,16 @@ chmod +x setup.sh

> If running in a cloud env replace localhost with public facing IP address of the VM

- Export Environment Variables
- Setup Tiptap Pro

  Visit [Tiptap Pro](https://collab.tiptap.dev/pro-extensions) and signup (it is free).

  Create a **`.npmrc`** file, copy the following and replace your registry token generated from Tiptap Pro.

```bash
set -a
source .env
set +a
```

```
@tiptap-pro:registry=https://registry.tiptap.dev/
//registry.tiptap.dev/:_authToken=YOUR_REGISTRY_TOKEN
```
- Run Docker compose up

```bash
@@ -94,7 +96,7 @@ docker compose up -d
<p>
  <a href="https://plane.so" target="_blank">
    <img
      src="https://ik.imagekit.io/killbluedog/Plane_Views_Dark_Mode.png?updatedAt=1684943050275"
      src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_views_dark_mode.webp"
      alt="Plane Views"
      width="100%"
    />
@@ -103,7 +105,7 @@ docker compose up -d
<p>
  <a href="https://plane.so" target="_blank">
    <img
      src="https://ik.imagekit.io/killbluedog/Plane_Issue_Detail_Dark_Mode.png?updatedAt=1684943050202"
      src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_issue_detail_dark_mode.webp"
      alt="Plane Issue Details"
      width="100%"
    />
@@ -112,7 +114,7 @@ docker compose up -d
<p>
  <a href="https://plane.so" target="_blank">
    <img
      src="https://ik.imagekit.io/killbluedog/Plane_Cycles___Modules_Dark_Mode.png?updatedAt=1684943050281"
      src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_cycles_modules_dark_mode.webp"
      alt="Plane Cycles and Modules"
      width="100%"
    />
@@ -121,7 +123,7 @@ docker compose up -d
<p>
  <a href="https://plane.so" target="_blank">
    <img
      src="https://ik.imagekit.io/killbluedog/Plane_Analytics_Dark_Mode.png?updatedAt=1684944596824"
      src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_analytics_dark_mode.webp"
      alt="Plane Analytics"
      width="100%"
    />
@@ -130,7 +132,7 @@ docker compose up -d
<p>
  <a href="https://plane.so" target="_blank">
    <img
      src="https://ik.imagekit.io/killbluedog/Plane_Pages_Dark_Mode.png?updatedAt=1684943050202"
      src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_pages_dark_mode.webp"
      alt="Plane Pages"
      width="100%"
    />
@@ -140,7 +142,7 @@ docker compose up -d

<p>
  <a href="https://plane.so" target="_blank">
    <img
      src="https://ik.imagekit.io/killbluedog/Plane_Commad_K_Dark_Mode.png?updatedAt=1684943050312"
      src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_commad_k_dark_mode.webp"
      alt="Plane Command Menu"
      width="100%"
    />
@@ -49,7 +49,7 @@ USER root
RUN apk --no-cache add "bash~=5.2"
COPY ./bin ./bin/

RUN chmod +x ./bin/takeoff ./bin/worker
RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
RUN chmod -R 777 /code

USER captain

@@ -1,2 +1,3 @@
web: gunicorn -w 4 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:$PORT --config gunicorn.config.py --max-requests 10000 --max-requests-jitter 1000 --access-logfile -
worker: celery -A plane worker -l info
beat: celery -A plane beat -l INFO
apiserver/bin/beat (new file, 5 changes)

@@ -0,0 +1,5 @@
#!/bin/bash
set -e

python manage.py wait_for_db
celery -A plane beat -l info
@@ -6,4 +6,4 @@ python manage.py migrate
# Create a Default User
python bin/user_script.py

exec gunicorn -w 8 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --config gunicorn.config.py --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
exec gunicorn -w 8 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --max-requests 1200 --max-requests-jitter 1000 --access-logfile -

@@ -1,4 +1,4 @@
import os, sys
import os, sys, random, string
import uuid

sys.path.append("/code")
@@ -19,9 +19,9 @@ def populate():
        user = User.objects.create(email=default_email, username=uuid.uuid4().hex)
        user.set_password(default_password)
        user.save()
        print("User created")

        print("Success")
        print(f"User created with an email: {default_email}")
    else:
        print(f"User already exists with the default email: {default_email}")


if __name__ == "__main__":

@@ -1,2 +1,2 @@
from .workspace import WorkSpaceBasePermission, WorkSpaceAdminPermission, WorkspaceEntityPermission
from .workspace import WorkSpaceBasePermission, WorkSpaceAdminPermission, WorkspaceEntityPermission, WorkspaceViewerPermission
from .project import ProjectBasePermission, ProjectEntityPermission, ProjectMemberPermission, ProjectLitePermission

@@ -61,3 +61,13 @@ class WorkspaceEntityPermission(BasePermission):
        return WorkspaceMember.objects.filter(
            member=request.user, workspace__slug=view.workspace_slug
        ).exists()


class WorkspaceViewerPermission(BasePermission):
    def has_permission(self, request, view):
        if request.user.is_anonymous:
            return False

        return WorkspaceMember.objects.filter(
            member=request.user, workspace__slug=view.workspace_slug, role__gte=10
        ).exists()

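For context, and not something introduced by this changeset: a Django REST Framework view would opt into the new `WorkspaceViewerPermission` through `permission_classes`, and must expose the `workspace_slug` attribute the permission reads. A minimal usage sketch, with the view name, queryset, and URL kwarg chosen purely for illustration:

```python
# Illustrative sketch only — this view is not part of the diff above.
from rest_framework import viewsets

from plane.api.permissions import WorkspaceViewerPermission
from plane.api.serializers import WorkSpaceSerializer
from plane.db.models import Workspace


class WorkspaceExampleViewSet(viewsets.ReadOnlyModelViewSet):
    # Only workspace members with role >= 10 pass WorkspaceViewerPermission.
    permission_classes = [WorkspaceViewerPermission]
    serializer_class = WorkSpaceSerializer
    queryset = Workspace.objects.all()

    @property
    def workspace_slug(self):
        # The permission filters WorkspaceMember by this slug, so the URL
        # route is assumed to carry a `slug` keyword argument.
        return self.kwargs.get("slug", None)
```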
@@ -1,10 +1,5 @@
from .base import BaseSerializer
from .people import (
    ChangePasswordSerializer,
    ResetPasswordSerializer,
    TokenSerializer,
)
from .user import UserSerializer, UserLiteSerializer
from .user import UserSerializer, UserLiteSerializer, ChangePasswordSerializer, ResetPasswordSerializer, UserAdminLiteSerializer
from .workspace import (
    WorkSpaceSerializer,
    WorkSpaceMemberSerializer,
@@ -12,6 +7,7 @@ from .workspace import (
    WorkSpaceMemberInviteSerializer,
    WorkspaceLiteSerializer,
    WorkspaceThemeSerializer,
    WorkspaceMemberAdminSerializer,
)
from .project import (
    ProjectSerializer,
@@ -21,10 +17,13 @@ from .project import (
    ProjectIdentifierSerializer,
    ProjectFavoriteSerializer,
    ProjectLiteSerializer,
    ProjectMemberLiteSerializer,
    ProjectDeployBoardSerializer,
    ProjectMemberAdminSerializer,
)
from .state import StateSerializer, StateLiteSerializer
from .view import IssueViewSerializer, IssueViewFavoriteSerializer
from .cycle import CycleSerializer, CycleIssueSerializer, CycleFavoriteSerializer
from .cycle import CycleSerializer, CycleIssueSerializer, CycleFavoriteSerializer, CycleWriteSerializer
from .asset import FileAssetSerializer
from .issue import (
    IssueCreateSerializer,
@@ -41,6 +40,10 @@ from .issue import (
    IssueLinkSerializer,
    IssueLiteSerializer,
    IssueAttachmentSerializer,
    IssueSubscriberSerializer,
    IssueReactionSerializer,
    CommentReactionSerializer,
    IssueVoteSerializer,
)

from .module import (
@@ -74,4 +77,9 @@ from .estimate import (
)

from .inbox import InboxSerializer, InboxIssueSerializer, IssueStateInboxSerializer

from .analytic import AnalyticViewSerializer

from .notification import NotificationSerializer

from .exporter import ExporterHistorySerializer

@@ -12,6 +12,12 @@ from .workspace import WorkspaceLiteSerializer
from .project import ProjectLiteSerializer
from plane.db.models import Cycle, CycleIssue, CycleFavorite

class CycleWriteSerializer(BaseSerializer):

    class Meta:
        model = Cycle
        fields = "__all__"


class CycleSerializer(BaseSerializer):
    owned_by = UserLiteSerializer(read_only=True)
@@ -35,6 +41,7 @@ class CycleSerializer(BaseSerializer):
                {
                    "avatar": assignee.avatar,
                    "first_name": assignee.first_name,
                    "display_name": assignee.display_name,
                    "id": assignee.id,
                }
                for issue_cycle in obj.issue_cycle.all()

apiserver/plane/api/serializers/exporter.py (new file, 26 changes)

@@ -0,0 +1,26 @@
# Module imports
from .base import BaseSerializer
from plane.db.models import ExporterHistory
from .user import UserLiteSerializer


class ExporterHistorySerializer(BaseSerializer):
    initiated_by_detail = UserLiteSerializer(source="initiated_by", read_only=True)

    class Meta:
        model = ExporterHistory
        fields = [
            "id",
            "created_at",
            "updated_at",
            "project",
            "provider",
            "status",
            "url",
            "initiated_by",
            "initiated_by_detail",
            "token",
            "created_by",
            "updated_by",
        ]
        read_only_fields = fields
@@ -19,6 +19,7 @@ from plane.db.models import (
|
||||
IssueProperty,
|
||||
IssueBlocker,
|
||||
IssueAssignee,
|
||||
IssueSubscriber,
|
||||
IssueLabel,
|
||||
Label,
|
||||
IssueBlocker,
|
||||
@@ -28,6 +29,9 @@ from plane.db.models import (
|
||||
ModuleIssue,
|
||||
IssueLink,
|
||||
IssueAttachment,
|
||||
IssueReaction,
|
||||
CommentReaction,
|
||||
IssueVote,
|
||||
)
|
||||
|
||||
|
||||
@@ -49,6 +53,20 @@ class IssueFlatSerializer(BaseSerializer):
|
||||
]
|
||||
|
||||
|
||||
class IssueProjectLiteSerializer(BaseSerializer):
|
||||
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Issue
|
||||
fields = [
|
||||
"id",
|
||||
"project_detail",
|
||||
"name",
|
||||
"sequence_id",
|
||||
]
|
||||
read_only_fields = fields
|
||||
|
||||
|
||||
##TODO: Find a better way to write this serializer
|
||||
## Find a better approach to save manytomany?
|
||||
class IssueCreateSerializer(BaseSerializer):
|
||||
@@ -94,14 +112,26 @@ class IssueCreateSerializer(BaseSerializer):
|
||||
"updated_at",
|
||||
]
|
||||
|
||||
def validate(self, data):
|
||||
if data.get("start_date", None) is not None and data.get("target_date", None) is not None and data.get("start_date", None) > data.get("target_date", None):
|
||||
raise serializers.ValidationError("Start date cannot exceed target date")
|
||||
return data
|
||||
|
||||
def create(self, validated_data):
|
||||
blockers = validated_data.pop("blockers_list", None)
|
||||
assignees = validated_data.pop("assignees_list", None)
|
||||
labels = validated_data.pop("labels_list", None)
|
||||
blocks = validated_data.pop("blocks_list", None)
|
||||
|
||||
project = self.context["project"]
|
||||
issue = Issue.objects.create(**validated_data, project=project)
|
||||
project_id = self.context["project_id"]
|
||||
workspace_id = self.context["workspace_id"]
|
||||
default_assignee_id = self.context["default_assignee_id"]
|
||||
|
||||
issue = Issue.objects.create(**validated_data, project_id=project_id)
|
||||
|
||||
# Issue Audit Users
|
||||
created_by_id = issue.created_by_id
|
||||
updated_by_id = issue.updated_by_id
|
||||
|
||||
if blockers is not None and len(blockers):
|
||||
IssueBlocker.objects.bulk_create(
|
||||
@@ -109,10 +139,10 @@ class IssueCreateSerializer(BaseSerializer):
|
||||
IssueBlocker(
|
||||
block=issue,
|
||||
blocked_by=blocker,
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
created_by=issue.created_by,
|
||||
updated_by=issue.updated_by,
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
created_by_id=created_by_id,
|
||||
updated_by_id=updated_by_id,
|
||||
)
|
||||
for blocker in blockers
|
||||
],
|
||||
@@ -125,10 +155,10 @@ class IssueCreateSerializer(BaseSerializer):
|
||||
IssueAssignee(
|
||||
assignee=user,
|
||||
issue=issue,
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
created_by=issue.created_by,
|
||||
updated_by=issue.updated_by,
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
created_by_id=created_by_id,
|
||||
updated_by_id=updated_by_id,
|
||||
)
|
||||
for user in assignees
|
||||
],
|
||||
@@ -136,14 +166,14 @@ class IssueCreateSerializer(BaseSerializer):
|
||||
)
|
||||
else:
|
||||
# Then assign it to default assignee
|
||||
if project.default_assignee is not None:
|
||||
if default_assignee_id is not None:
|
||||
IssueAssignee.objects.create(
|
||||
assignee=project.default_assignee,
|
||||
assignee_id=default_assignee_id,
|
||||
issue=issue,
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
created_by=issue.created_by,
|
||||
updated_by=issue.updated_by,
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
created_by_id=created_by_id,
|
||||
updated_by_id=updated_by_id,
|
||||
)
|
||||
|
||||
if labels is not None and len(labels):
|
||||
@@ -152,10 +182,10 @@ class IssueCreateSerializer(BaseSerializer):
|
||||
IssueLabel(
|
||||
label=label,
|
||||
issue=issue,
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
created_by=issue.created_by,
|
||||
updated_by=issue.updated_by,
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
created_by_id=created_by_id,
|
||||
updated_by_id=updated_by_id,
|
||||
)
|
||||
for label in labels
|
||||
],
|
||||
@@ -168,10 +198,10 @@ class IssueCreateSerializer(BaseSerializer):
|
||||
IssueBlocker(
|
||||
block=block,
|
||||
blocked_by=issue,
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
created_by=issue.created_by,
|
||||
updated_by=issue.updated_by,
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
created_by_id=created_by_id,
|
||||
updated_by_id=updated_by_id,
|
||||
)
|
||||
for block in blocks
|
||||
],
|
||||
@@ -186,6 +216,12 @@ class IssueCreateSerializer(BaseSerializer):
|
||||
labels = validated_data.pop("labels_list", None)
|
||||
blocks = validated_data.pop("blocks_list", None)
|
||||
|
||||
# Related models
|
||||
project_id = instance.project_id
|
||||
workspace_id = instance.workspace_id
|
||||
created_by_id = instance.created_by_id
|
||||
updated_by_id = instance.updated_by_id
|
||||
|
||||
if blockers is not None:
|
||||
IssueBlocker.objects.filter(block=instance).delete()
|
||||
IssueBlocker.objects.bulk_create(
|
||||
@@ -193,10 +229,10 @@ class IssueCreateSerializer(BaseSerializer):
|
||||
IssueBlocker(
|
||||
block=instance,
|
||||
blocked_by=blocker,
|
||||
project=instance.project,
|
||||
workspace=instance.project.workspace,
|
||||
created_by=instance.created_by,
|
||||
updated_by=instance.updated_by,
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
created_by_id=created_by_id,
|
||||
updated_by_id=updated_by_id,
|
||||
)
|
||||
for blocker in blockers
|
||||
],
|
||||
@@ -210,10 +246,10 @@ class IssueCreateSerializer(BaseSerializer):
|
||||
IssueAssignee(
|
||||
assignee=user,
|
||||
issue=instance,
|
||||
project=instance.project,
|
||||
workspace=instance.project.workspace,
|
||||
created_by=instance.created_by,
|
||||
updated_by=instance.updated_by,
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
created_by_id=created_by_id,
|
||||
updated_by_id=updated_by_id,
|
||||
)
|
||||
for user in assignees
|
||||
],
|
||||
@@ -227,10 +263,10 @@ class IssueCreateSerializer(BaseSerializer):
|
||||
IssueLabel(
|
||||
label=label,
|
||||
issue=instance,
|
||||
project=instance.project,
|
||||
workspace=instance.project.workspace,
|
||||
created_by=instance.created_by,
|
||||
updated_by=instance.updated_by,
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
created_by_id=created_by_id,
|
||||
updated_by_id=updated_by_id,
|
||||
)
|
||||
for label in labels
|
||||
],
|
||||
@@ -244,23 +280,25 @@ class IssueCreateSerializer(BaseSerializer):
|
||||
IssueBlocker(
|
||||
block=block,
|
||||
blocked_by=instance,
|
||||
project=instance.project,
|
||||
workspace=instance.project.workspace,
|
||||
created_by=instance.created_by,
|
||||
updated_by=instance.updated_by,
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
created_by_id=created_by_id,
|
||||
updated_by_id=updated_by_id,
|
||||
)
|
||||
for block in blocks
|
||||
],
|
||||
batch_size=10,
|
||||
)
|
||||
|
||||
# Time updation occues even when other related models are updated
|
||||
instance.updated_at = timezone.now()
|
||||
return super().update(instance, validated_data)
|
||||
|
||||
|
||||
class IssueActivitySerializer(BaseSerializer):
|
||||
actor_detail = UserLiteSerializer(read_only=True, source="actor")
|
||||
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
|
||||
issue_detail = IssueFlatSerializer(read_only=True, source="issue")
|
||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||
|
||||
class Meta:
|
||||
model = IssueActivity
|
||||
@@ -334,19 +372,31 @@ class IssueLabelSerializer(BaseSerializer):
|
||||
|
||||
|
||||
class BlockedIssueSerializer(BaseSerializer):
|
||||
blocked_issue_detail = IssueFlatSerializer(source="block", read_only=True)
|
||||
blocked_issue_detail = IssueProjectLiteSerializer(source="block", read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = IssueBlocker
|
||||
fields = "__all__"
|
||||
fields = [
|
||||
"blocked_issue_detail",
|
||||
"blocked_by",
|
||||
"block",
|
||||
]
|
||||
read_only_fields = fields
|
||||
|
||||
|
||||
class BlockerIssueSerializer(BaseSerializer):
|
||||
blocker_issue_detail = IssueFlatSerializer(source="blocked_by", read_only=True)
|
||||
blocker_issue_detail = IssueProjectLiteSerializer(
|
||||
source="blocked_by", read_only=True
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = IssueBlocker
|
||||
fields = "__all__"
|
||||
fields = [
|
||||
"blocker_issue_detail",
|
||||
"blocked_by",
|
||||
"block",
|
||||
]
|
||||
read_only_fields = fields
|
||||
|
||||
|
||||
class IssueAssigneeSerializer(BaseSerializer):
|
||||
@@ -459,11 +509,103 @@ class IssueAttachmentSerializer(BaseSerializer):
|
||||
]
|
||||
|
||||
|
||||
class IssueReactionSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = IssueReaction
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"project",
|
||||
"issue",
|
||||
"actor",
|
||||
]
|
||||
|
||||
|
||||
class IssueReactionLiteSerializer(BaseSerializer):
|
||||
actor_detail = UserLiteSerializer(read_only=True, source="actor")
|
||||
|
||||
class Meta:
|
||||
model = IssueReaction
|
||||
fields = [
|
||||
"id",
|
||||
"reaction",
|
||||
"issue",
|
||||
"actor_detail",
|
||||
]
|
||||
|
||||
|
||||
class CommentReactionLiteSerializer(BaseSerializer):
|
||||
actor_detail = UserLiteSerializer(read_only=True, source="actor")
|
||||
|
||||
class Meta:
|
||||
model = CommentReaction
|
||||
fields = [
|
||||
"id",
|
||||
"reaction",
|
||||
"comment",
|
||||
"actor_detail",
|
||||
]
|
||||
|
||||
|
||||
class CommentReactionSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = CommentReaction
|
||||
fields = "__all__"
|
||||
read_only_fields = ["workspace", "project", "comment", "actor"]
|
||||
|
||||
|
||||
|
||||
class IssueVoteSerializer(BaseSerializer):
|
||||
|
||||
class Meta:
|
||||
model = IssueVote
|
||||
fields = ["issue", "vote", "workspace_id", "project_id", "actor"]
|
||||
read_only_fields = fields
|
||||
|
||||
|
||||
class IssueCommentSerializer(BaseSerializer):
|
||||
actor_detail = UserLiteSerializer(read_only=True, source="actor")
|
||||
issue_detail = IssueFlatSerializer(read_only=True, source="issue")
|
||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
|
||||
comment_reactions = CommentReactionLiteSerializer(read_only=True, many=True)
|
||||
|
||||
|
||||
class Meta:
|
||||
model = IssueComment
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"project",
|
||||
"issue",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"access",
|
||||
]
|
||||
|
||||
|
||||
class IssueStateFlatSerializer(BaseSerializer):
|
||||
state_detail = StateLiteSerializer(read_only=True, source="state")
|
||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||
|
||||
class Meta:
|
||||
model = Issue
|
||||
fields = [
|
||||
"id",
|
||||
"sequence_id",
|
||||
"name",
|
||||
"state_detail",
|
||||
"project_detail",
|
||||
]
|
||||
|
||||
|
||||
# Issue Serializer with state details
|
||||
class IssueStateSerializer(BaseSerializer):
|
||||
state_detail = StateSerializer(read_only=True, source="state")
|
||||
project_detail = ProjectSerializer(read_only=True, source="project")
|
||||
label_details = LabelSerializer(read_only=True, source="labels", many=True)
|
||||
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
|
||||
state_detail = StateLiteSerializer(read_only=True, source="state")
|
||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||
assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
|
||||
sub_issues_count = serializers.IntegerField(read_only=True)
|
||||
bridge_id = serializers.UUIDField(read_only=True)
|
||||
@@ -476,9 +618,9 @@ class IssueStateSerializer(BaseSerializer):
|
||||
|
||||
|
||||
class IssueSerializer(BaseSerializer):
|
||||
project_detail = ProjectSerializer(read_only=True, source="project")
|
||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||
state_detail = StateSerializer(read_only=True, source="state")
|
||||
parent_detail = IssueFlatSerializer(read_only=True, source="parent")
|
||||
parent_detail = IssueStateFlatSerializer(read_only=True, source="parent")
|
||||
label_details = LabelSerializer(read_only=True, source="labels", many=True)
|
||||
assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
|
||||
# List of issues blocked by this issue
|
||||
@@ -490,6 +632,7 @@ class IssueSerializer(BaseSerializer):
|
||||
issue_link = IssueLinkSerializer(read_only=True, many=True)
|
||||
issue_attachment = IssueAttachmentSerializer(read_only=True, many=True)
|
||||
sub_issues_count = serializers.IntegerField(read_only=True)
|
||||
issue_reactions = IssueReactionLiteSerializer(read_only=True, many=True)
|
||||
|
||||
class Meta:
|
||||
model = Issue
|
||||
@@ -515,6 +658,7 @@ class IssueLiteSerializer(BaseSerializer):
|
||||
module_id = serializers.UUIDField(read_only=True)
|
||||
attachment_count = serializers.IntegerField(read_only=True)
|
||||
link_count = serializers.IntegerField(read_only=True)
|
||||
issue_reactions = IssueReactionLiteSerializer(read_only=True, many=True)
|
||||
|
||||
class Meta:
|
||||
model = Issue
|
||||
@@ -530,3 +674,14 @@ class IssueLiteSerializer(BaseSerializer):
|
||||
"created_at",
|
||||
"updated_at",
|
||||
]
|
||||
|
||||
|
||||
class IssueSubscriberSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = IssueSubscriber
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"project",
|
||||
"issue",
|
||||
]
|
||||
|
||||
@@ -106,7 +106,7 @@ class ModuleFlatSerializer(BaseSerializer):

class ModuleIssueSerializer(BaseSerializer):
    module_detail = ModuleFlatSerializer(read_only=True, source="module")
    issue_detail = IssueStateSerializer(read_only=True, source="issue")
    issue_detail = ProjectLiteSerializer(read_only=True, source="issue")
    sub_issues_count = serializers.IntegerField(read_only=True)

    class Meta:
@@ -151,7 +151,7 @@ class ModuleLinkSerializer(BaseSerializer):


class ModuleSerializer(BaseSerializer):
    project_detail = ProjectSerializer(read_only=True, source="project")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    lead_detail = UserLiteSerializer(read_only=True, source="lead")
    members_detail = UserLiteSerializer(read_only=True, many=True, source="members")
    link_module = ModuleLinkSerializer(read_only=True, many=True)

apiserver/plane/api/serializers/notification.py (new file, 12 changes)

@@ -0,0 +1,12 @@
# Module imports
from .base import BaseSerializer
from .user import UserLiteSerializer
from plane.db.models import Notification

class NotificationSerializer(BaseSerializer):
    triggered_by_details = UserLiteSerializer(read_only=True, source="triggered_by")

    class Meta:
        model = Notification
        fields = "__all__"

@@ -3,7 +3,7 @@ from rest_framework import serializers
|
||||
|
||||
# Module imports
|
||||
from .base import BaseSerializer
|
||||
from .issue import IssueFlatSerializer, LabelSerializer
|
||||
from .issue import IssueFlatSerializer, LabelLiteSerializer
|
||||
from .workspace import WorkspaceLiteSerializer
|
||||
from .project import ProjectLiteSerializer
|
||||
from plane.db.models import Page, PageBlock, PageFavorite, PageLabel, Label
|
||||
@@ -23,16 +23,22 @@ class PageBlockSerializer(BaseSerializer):
|
||||
"page",
|
||||
]
|
||||
|
||||
class PageBlockLiteSerializer(BaseSerializer):
|
||||
|
||||
class Meta:
|
||||
model = PageBlock
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class PageSerializer(BaseSerializer):
|
||||
is_favorite = serializers.BooleanField(read_only=True)
|
||||
label_details = LabelSerializer(read_only=True, source="labels", many=True)
|
||||
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
|
||||
labels_list = serializers.ListField(
|
||||
child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
|
||||
write_only=True,
|
||||
required=False,
|
||||
)
|
||||
blocks = PageBlockSerializer(read_only=True, many=True)
|
||||
blocks = PageBlockLiteSerializer(read_only=True, many=True)
|
||||
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
||||
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
|
||||
|
||||
|
||||
@@ -1,57 +0,0 @@
|
||||
from rest_framework.serializers import (
|
||||
ModelSerializer,
|
||||
Serializer,
|
||||
CharField,
|
||||
SerializerMethodField,
|
||||
)
|
||||
from rest_framework.authtoken.models import Token
|
||||
from rest_framework_simplejwt.tokens import RefreshToken
|
||||
|
||||
|
||||
from plane.db.models import User
|
||||
|
||||
|
||||
class UserSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = User
|
||||
fields = "__all__"
|
||||
extra_kwargs = {"password": {"write_only": True}}
|
||||
|
||||
|
||||
class ChangePasswordSerializer(Serializer):
|
||||
model = User
|
||||
|
||||
"""
|
||||
Serializer for password change endpoint.
|
||||
"""
|
||||
old_password = CharField(required=True)
|
||||
new_password = CharField(required=True)
|
||||
|
||||
|
||||
class ResetPasswordSerializer(Serializer):
|
||||
model = User
|
||||
|
||||
"""
|
||||
Serializer for password change endpoint.
|
||||
"""
|
||||
new_password = CharField(required=True)
|
||||
confirm_password = CharField(required=True)
|
||||
|
||||
|
||||
class TokenSerializer(ModelSerializer):
|
||||
|
||||
user = UserSerializer()
|
||||
access_token = SerializerMethodField()
|
||||
refresh_token = SerializerMethodField()
|
||||
|
||||
def get_access_token(self, obj):
|
||||
refresh_token = RefreshToken.for_user(obj.user)
|
||||
return str(refresh_token.access_token)
|
||||
|
||||
def get_refresh_token(self, obj):
|
||||
refresh_token = RefreshToken.for_user(obj.user)
|
||||
return str(refresh_token)
|
||||
|
||||
class Meta:
|
||||
model = Token
|
||||
fields = "__all__"
|
||||
@@ -7,13 +7,14 @@ from rest_framework import serializers
|
||||
# Module imports
|
||||
from .base import BaseSerializer
|
||||
from plane.api.serializers.workspace import WorkSpaceSerializer, WorkspaceLiteSerializer
|
||||
from plane.api.serializers.user import UserLiteSerializer
|
||||
from plane.api.serializers.user import UserLiteSerializer, UserAdminLiteSerializer
|
||||
from plane.db.models import (
|
||||
Project,
|
||||
ProjectMember,
|
||||
ProjectMemberInvite,
|
||||
ProjectIdentifier,
|
||||
ProjectFavorite,
|
||||
ProjectDeployBoard,
|
||||
)
|
||||
|
||||
|
||||
@@ -77,6 +78,21 @@ class ProjectSerializer(BaseSerializer):
|
||||
raise serializers.ValidationError(detail="Project Identifier is already taken")
|
||||
|
||||
|
||||
class ProjectLiteSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = Project
|
||||
fields = [
|
||||
"id",
|
||||
"identifier",
|
||||
"name",
|
||||
"cover_image",
|
||||
"icon_prop",
|
||||
"emoji",
|
||||
"description",
|
||||
]
|
||||
read_only_fields = fields
|
||||
|
||||
|
||||
class ProjectDetailSerializer(BaseSerializer):
|
||||
workspace = WorkSpaceSerializer(read_only=True)
|
||||
default_assignee = UserLiteSerializer(read_only=True)
|
||||
@@ -86,6 +102,9 @@ class ProjectDetailSerializer(BaseSerializer):
|
||||
total_cycles = serializers.IntegerField(read_only=True)
|
||||
total_modules = serializers.IntegerField(read_only=True)
|
||||
is_member = serializers.BooleanField(read_only=True)
|
||||
sort_order = serializers.FloatField(read_only=True)
|
||||
member_role = serializers.IntegerField(read_only=True)
|
||||
is_deployed = serializers.BooleanField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Project
|
||||
@@ -94,7 +113,7 @@ class ProjectDetailSerializer(BaseSerializer):
|
||||
|
||||
class ProjectMemberSerializer(BaseSerializer):
|
||||
workspace = WorkSpaceSerializer(read_only=True)
|
||||
project = ProjectSerializer(read_only=True)
|
||||
project = ProjectLiteSerializer(read_only=True)
|
||||
member = UserLiteSerializer(read_only=True)
|
||||
|
||||
class Meta:
|
||||
@@ -102,9 +121,19 @@ class ProjectMemberSerializer(BaseSerializer):
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class ProjectMemberAdminSerializer(BaseSerializer):
|
||||
workspace = WorkspaceLiteSerializer(read_only=True)
|
||||
project = ProjectLiteSerializer(read_only=True)
|
||||
member = UserAdminLiteSerializer(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = ProjectMember
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class ProjectMemberInviteSerializer(BaseSerializer):
|
||||
project = ProjectSerializer(read_only=True)
|
||||
workspace = WorkSpaceSerializer(read_only=True)
|
||||
project = ProjectLiteSerializer(read_only=True)
|
||||
workspace = WorkspaceLiteSerializer(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = ProjectMemberInvite
|
||||
@@ -118,7 +147,7 @@ class ProjectIdentifierSerializer(BaseSerializer):
|
||||
|
||||
|
||||
class ProjectFavoriteSerializer(BaseSerializer):
|
||||
project_detail = ProjectSerializer(source="project", read_only=True)
|
||||
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = ProjectFavorite
|
||||
@@ -129,8 +158,24 @@ class ProjectFavoriteSerializer(BaseSerializer):
|
||||
]
|
||||
|
||||
|
||||
class ProjectLiteSerializer(BaseSerializer):
|
||||
class ProjectMemberLiteSerializer(BaseSerializer):
|
||||
member = UserLiteSerializer(read_only=True)
|
||||
is_subscribed = serializers.BooleanField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Project
|
||||
fields = ["id", "identifier", "name"]
|
||||
model = ProjectMember
|
||||
fields = ["member", "id", "is_subscribed"]
|
||||
read_only_fields = fields
|
||||
|
||||
|
||||
class ProjectDeployBoardSerializer(BaseSerializer):
|
||||
project_details = ProjectLiteSerializer(read_only=True, source="project")
|
||||
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
|
||||
|
||||
class Meta:
|
||||
model = ProjectDeployBoard
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"project" "anchor",
|
||||
]
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
# Third party imports
|
||||
from rest_framework import serializers
|
||||
|
||||
# Module import
|
||||
from .base import BaseSerializer
|
||||
from plane.db.models import User
|
||||
@@ -37,11 +40,50 @@ class UserLiteSerializer(BaseSerializer):
|
||||
"id",
|
||||
"first_name",
|
||||
"last_name",
|
||||
"email",
|
||||
"avatar",
|
||||
"is_bot",
|
||||
"display_name",
|
||||
]
|
||||
read_only_fields = [
|
||||
"id",
|
||||
"is_bot",
|
||||
]
|
||||
|
||||
|
||||
class UserAdminLiteSerializer(BaseSerializer):
|
||||
|
||||
class Meta:
|
||||
model = User
|
||||
fields = [
|
||||
"id",
|
||||
"first_name",
|
||||
"last_name",
|
||||
"avatar",
|
||||
"is_bot",
|
||||
"display_name",
|
||||
"email",
|
||||
]
|
||||
read_only_fields = [
|
||||
"id",
|
||||
"is_bot",
|
||||
]
|
||||
|
||||
|
||||
class ChangePasswordSerializer(serializers.Serializer):
|
||||
model = User
|
||||
|
||||
"""
|
||||
Serializer for password change endpoint.
|
||||
"""
|
||||
old_password = serializers.CharField(required=True)
|
||||
new_password = serializers.CharField(required=True)
|
||||
|
||||
|
||||
class ResetPasswordSerializer(serializers.Serializer):
|
||||
model = User
|
||||
|
||||
"""
|
||||
Serializer for password change endpoint.
|
||||
"""
|
||||
new_password = serializers.CharField(required=True)
|
||||
confirm_password = serializers.CharField(required=True)
|
||||
|
||||
@@ -3,7 +3,7 @@ from rest_framework import serializers
|
||||
|
||||
# Module imports
|
||||
from .base import BaseSerializer
|
||||
from .user import UserLiteSerializer
|
||||
from .user import UserLiteSerializer, UserAdminLiteSerializer
|
||||
|
||||
from plane.db.models import (
|
||||
User,
|
||||
@@ -33,10 +33,30 @@ class WorkSpaceSerializer(BaseSerializer):
|
||||
"owner",
|
||||
]
|
||||
|
||||
class WorkspaceLiteSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = Workspace
|
||||
fields = [
|
||||
"name",
|
||||
"slug",
|
||||
"id",
|
||||
]
|
||||
read_only_fields = fields
|
||||
|
||||
|
||||
|
||||
class WorkSpaceMemberSerializer(BaseSerializer):
|
||||
member = UserLiteSerializer(read_only=True)
|
||||
workspace = WorkSpaceSerializer(read_only=True)
|
||||
workspace = WorkspaceLiteSerializer(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = WorkspaceMember
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class WorkspaceMemberAdminSerializer(BaseSerializer):
|
||||
member = UserAdminLiteSerializer(read_only=True)
|
||||
workspace = WorkspaceLiteSerializer(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = WorkspaceMember
|
||||
@@ -101,17 +121,6 @@ class TeamSerializer(BaseSerializer):
|
||||
return super().update(instance, validated_data)
|
||||
|
||||
|
||||
class WorkspaceLiteSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = Workspace
|
||||
fields = [
|
||||
"name",
|
||||
"slug",
|
||||
"id",
|
||||
]
|
||||
read_only_fields = fields
|
||||
|
||||
|
||||
class WorkspaceThemeSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = WorkspaceTheme
|
||||
|
||||
@@ -22,6 +22,7 @@ from plane.api.views import (
|
||||
# User
|
||||
UserEndpoint,
|
||||
UpdateUserOnBoardedEndpoint,
|
||||
UpdateUserTourCompletedEndpoint,
|
||||
UserActivityEndpoint,
|
||||
## End User
|
||||
# Workspaces
|
||||
@@ -31,6 +32,7 @@ from plane.api.views import (
|
||||
InviteWorkspaceEndpoint,
|
||||
JoinWorkspaceEndpoint,
|
||||
WorkSpaceMemberViewSet,
|
||||
WorkspaceMembersEndpoint,
|
||||
WorkspaceInvitationsViewset,
|
||||
UserWorkspaceInvitationsEndpoint,
|
||||
WorkspaceMemberUserEndpoint,
|
||||
@@ -44,6 +46,11 @@ from plane.api.views import (
|
||||
UserIssueCompletedGraphEndpoint,
|
||||
UserWorkspaceDashboardEndpoint,
|
||||
WorkspaceThemeViewSet,
|
||||
WorkspaceUserProfileStatsEndpoint,
|
||||
WorkspaceUserActivityEndpoint,
|
||||
WorkspaceUserProfileEndpoint,
|
||||
WorkspaceUserProfileIssuesEndpoint,
|
||||
WorkspaceLabelsEndpoint,
|
||||
## End Workspaces
|
||||
# File Assets
|
||||
FileAssetEndpoint,
|
||||
@@ -53,6 +60,7 @@ from plane.api.views import (
|
||||
ProjectViewSet,
|
||||
InviteProjectEndpoint,
|
||||
ProjectMemberViewSet,
|
||||
ProjectMemberEndpoint,
|
||||
ProjectMemberInvitationsViewset,
|
||||
ProjectMemberUserEndpoint,
|
||||
AddMemberToProjectEndpoint,
|
||||
@@ -76,6 +84,12 @@ from plane.api.views import (
|
||||
IssueLinkViewSet,
|
||||
BulkCreateIssueLabelsEndpoint,
|
||||
IssueAttachmentEndpoint,
|
||||
IssueArchiveViewSet,
|
||||
IssueSubscriberViewSet,
|
||||
IssueCommentPublicViewSet,
|
||||
IssueReactionViewSet,
|
||||
CommentReactionViewSet,
|
||||
ExportIssuesEndpoint,
|
||||
## End Issues
|
||||
# States
|
||||
StateViewSet,
|
||||
@@ -148,6 +162,19 @@ from plane.api.views import (
|
||||
ExportAnalyticsEndpoint,
|
||||
DefaultAnalyticsEndpoint,
|
||||
## End Analytics
|
||||
# Notification
|
||||
NotificationViewSet,
|
||||
UnreadNotificationEndpoint,
|
||||
## End Notification
|
||||
# Public Boards
|
||||
ProjectDeployBoardViewSet,
|
||||
ProjectDeployBoardIssuesPublicEndpoint,
|
||||
ProjectDeployBoardPublicSettingsEndpoint,
|
||||
IssueReactionPublicViewSet,
|
||||
CommentReactionPublicViewSet,
|
||||
InboxIssuePublicViewSet,
|
||||
IssueVotePublicViewSet,
|
||||
## End Public Boards
|
||||
)
|
||||
|
||||
|
||||
@@ -197,7 +224,12 @@ urlpatterns = [
|
||||
path(
|
||||
"users/me/onboard/",
|
||||
UpdateUserOnBoardedEndpoint.as_view(),
|
||||
name="change-password",
|
||||
name="user-onboard",
|
||||
),
|
||||
path(
|
||||
"users/me/tour-completed/",
|
||||
UpdateUserTourCompletedEndpoint.as_view(),
|
||||
name="user-tour",
|
||||
),
|
||||
path("users/activities/", UserActivityEndpoint.as_view(), name="user-activities"),
|
||||
# user workspaces
|
||||
@@ -295,7 +327,6 @@ urlpatterns = [
|
||||
{
|
||||
"delete": "destroy",
|
||||
"get": "retrieve",
|
||||
"get": "retrieve",
|
||||
}
|
||||
),
|
||||
name="workspace",
|
||||
@@ -316,6 +347,11 @@ urlpatterns = [
|
||||
),
|
||||
name="workspace",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/workspace-members/",
|
||||
WorkspaceMembersEndpoint.as_view(),
|
||||
name="workspace-members",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/teams/",
|
||||
TeamMemberViewSet.as_view(
|
||||
@@ -374,6 +410,31 @@ urlpatterns = [
|
||||
),
|
||||
name="workspace-themes",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/user-stats/<uuid:user_id>/",
|
||||
WorkspaceUserProfileStatsEndpoint.as_view(),
|
||||
name="workspace-user-stats",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/user-activity/<uuid:user_id>/",
|
||||
WorkspaceUserActivityEndpoint.as_view(),
|
||||
name="workspace-user-activity",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/user-profile/<uuid:user_id>/",
|
||||
WorkspaceUserProfileEndpoint.as_view(),
|
||||
name="workspace-user-profile-page",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/user-issues/<uuid:user_id>/",
|
||||
WorkspaceUserProfileIssuesEndpoint.as_view(),
|
||||
name="workspace-user-profile-issues",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/labels/",
|
||||
WorkspaceLabelsEndpoint.as_view(),
|
||||
name="workspace-labels",
|
||||
),
|
||||
## End Workspaces ##
|
||||
# Projects
|
||||
path(
|
||||
@@ -424,6 +485,11 @@ urlpatterns = [
|
||||
),
|
||||
name="project",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/project-members/",
|
||||
ProjectMemberEndpoint.as_view(),
|
||||
name="project",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/members/add/",
|
||||
AddMemberToProjectEndpoint.as_view(),
|
||||
@@ -468,7 +534,6 @@ urlpatterns = [
|
||||
"workspaces/<str:slug>/user-favorite-projects/",
|
||||
ProjectFavoritesViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
}
|
||||
),
|
||||
@@ -766,6 +831,11 @@ urlpatterns = [
|
||||
IssueAttachmentEndpoint.as_view(),
|
||||
name="project-issue-attachments",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/export-issues/",
|
||||
ExportIssuesEndpoint.as_view(),
|
||||
name="export-issues",
|
||||
),
|
||||
## End Issues
|
||||
## Issue Activity
|
||||
path(
|
||||
@@ -798,6 +868,76 @@ urlpatterns = [
|
||||
name="project-issue-comment",
|
||||
),
|
||||
## End IssueComments
|
||||
# Issue Subscribers
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-subscribers/",
|
||||
IssueSubscriberViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
}
|
||||
),
|
||||
name="project-issue-subscribers",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-subscribers/<uuid:subscriber_id>/",
|
||||
IssueSubscriberViewSet.as_view({"delete": "destroy"}),
|
||||
name="project-issue-subscribers",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/subscribe/",
|
||||
IssueSubscriberViewSet.as_view(
|
||||
{
|
||||
"get": "subscription_status",
|
||||
"post": "subscribe",
|
||||
"delete": "unsubscribe",
|
||||
}
|
||||
),
|
||||
name="project-issue-subscribers",
|
||||
),
|
||||
## End Issue Subscribers
|
||||
# Issue Reactions
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
|
||||
IssueReactionViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
}
|
||||
),
|
||||
name="project-issue-reactions",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
|
||||
IssueReactionViewSet.as_view(
|
||||
{
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="project-issue-reactions",
|
||||
),
|
||||
## End Issue Reactions
|
||||
# Comment Reactions
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
|
||||
CommentReactionViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
}
|
||||
),
|
||||
name="project-issue-comment-reactions",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
|
||||
CommentReactionViewSet.as_view(
|
||||
{
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="project-issue-comment-reactions",
|
||||
),
|
||||
## End Comment Reactions
|
||||
## IssueProperty
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-properties/",
|
||||
@@ -822,6 +962,36 @@ urlpatterns = [
|
||||
name="project-issue-roadmap",
|
||||
),
|
||||
## IssueProperty End
|
||||
## Issue Archives
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/",
|
||||
IssueArchiveViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
}
|
||||
),
|
||||
name="project-issue-archive",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/<uuid:pk>/",
|
||||
IssueArchiveViewSet.as_view(
|
||||
{
|
||||
"get": "retrieve",
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="project-issue-archive",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/unarchive/<uuid:pk>/",
|
||||
IssueArchiveViewSet.as_view(
|
||||
{
|
||||
"post": "unarchive",
|
||||
}
|
||||
),
|
||||
name="project-issue-archive",
|
||||
),
|
||||
## End Issue Archives
|
||||
## File Assets
|
||||
path(
|
||||
"workspaces/<str:slug>/file-assets/",
|
||||
@@ -1172,7 +1342,7 @@ urlpatterns = [
|
||||
## End Importer
|
||||
# Search
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/search/",
|
||||
"workspaces/<str:slug>/search/",
|
||||
GlobalSearchEndpoint.as_view(),
|
||||
name="global-search",
|
||||
),
|
||||
@@ -1274,4 +1444,175 @@ urlpatterns = [
|
||||
name="default-analytics",
|
||||
),
|
||||
## End Analytics
|
||||
# Notification
|
||||
path(
|
||||
"workspaces/<str:slug>/users/notifications/",
|
||||
NotificationViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
}
|
||||
),
|
||||
name="notifications",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/users/notifications/<uuid:pk>/",
|
||||
NotificationViewSet.as_view(
|
||||
{
|
||||
"get": "retrieve",
|
||||
"patch": "partial_update",
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="notifications",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/users/notifications/<uuid:pk>/read/",
|
||||
NotificationViewSet.as_view(
|
||||
{
|
||||
"post": "mark_read",
|
||||
"delete": "mark_unread",
|
||||
}
|
||||
),
|
||||
name="notifications",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/users/notifications/<uuid:pk>/archive/",
|
||||
NotificationViewSet.as_view(
|
||||
{
|
||||
"post": "archive",
|
||||
"delete": "unarchive",
|
||||
}
|
||||
),
|
||||
name="notifications",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/users/notifications/unread/",
|
||||
UnreadNotificationEndpoint.as_view(),
|
||||
name="unread-notifications",
|
||||
),
|
||||
## End Notification
|
||||
# Public Boards
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/",
|
||||
ProjectDeployBoardViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
}
|
||||
),
|
||||
name="project-deploy-board",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/<uuid:pk>/",
|
||||
ProjectDeployBoardViewSet.as_view(
|
||||
{
|
||||
"get": "retrieve",
|
||||
"patch": "partial_update",
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="project-deploy-board",
|
||||
),
|
||||
path(
|
||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/settings/",
|
||||
ProjectDeployBoardPublicSettingsEndpoint.as_view(),
|
||||
name="project-deploy-board-settings",
|
||||
),
|
||||
path(
|
||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/",
|
||||
ProjectDeployBoardIssuesPublicEndpoint.as_view(),
|
||||
name="project-deploy-board",
|
||||
),
|
||||
path(
|
||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
|
||||
IssueCommentPublicViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
}
|
||||
),
|
||||
name="issue-comments-project-board",
|
||||
),
|
||||
path(
|
||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
|
||||
IssueCommentPublicViewSet.as_view(
|
||||
{
|
||||
"get": "retrieve",
|
||||
"patch": "partial_update",
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="issue-comments-project-board",
|
||||
),
|
||||
path(
|
||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
|
||||
IssueReactionPublicViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
}
|
||||
),
|
||||
name="issue-reactions-project-board",
|
||||
),
|
||||
path(
|
||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
|
||||
IssueReactionPublicViewSet.as_view(
|
||||
{
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="issue-reactions-project-board",
|
||||
),
|
||||
path(
|
||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
|
||||
CommentReactionPublicViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
}
|
||||
),
|
||||
name="comment-reactions-project-board",
|
||||
),
|
||||
path(
|
||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
|
||||
CommentReactionPublicViewSet.as_view(
|
||||
{
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="comment-reactions-project-board",
|
||||
),
|
||||
path(
|
||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
|
||||
InboxIssuePublicViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
}
|
||||
),
|
||||
name="inbox-issue",
|
||||
),
|
||||
path(
|
||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
|
||||
InboxIssuePublicViewSet.as_view(
|
||||
{
|
||||
"get": "retrieve",
|
||||
"patch": "partial_update",
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="inbox-issue",
|
||||
),
|
||||
path(
|
||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/votes/",
|
||||
IssueVotePublicViewSet.as_view(
|
||||
{
|
||||
"get": "list",
|
||||
"post": "create",
|
||||
"delete": "destroy",
|
||||
}
|
||||
),
|
||||
name="issue-vote-project-board",
|
||||
),
|
||||
## End Public Boards
|
||||
]
|
||||
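For orientation, the subscription route registered above maps three HTTP verbs onto a single URL through the `as_view` dict. A minimal client-side sketch follows; the host, the `/api` prefix, and the bearer-token auth are assumptions, while the verb-to-action mapping (`get` → `subscription_status`, `post` → `subscribe`, `delete` → `unsubscribe`) comes straight from the URL pattern above.

```python
import requests

BASE = "https://plane.example.com/api"          # assumed host and API prefix
HEADERS = {"Authorization": "Bearer <token>"}   # assumed auth scheme
url = f"{BASE}/workspaces/my-workspace/projects/<project-uuid>/issues/<issue-uuid>/subscribe/"

requests.post(url, headers=HEADERS)     # subscribe the current user
requests.get(url, headers=HEADERS)      # check subscription_status
requests.delete(url, headers=HEADERS)   # unsubscribe
```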
|
||||
@@ -12,10 +12,15 @@ from .project import (
|
||||
ProjectUserViewsEndpoint,
|
||||
ProjectMemberUserEndpoint,
|
||||
ProjectFavoritesViewSet,
|
||||
ProjectDeployBoardIssuesPublicEndpoint,
|
||||
ProjectDeployBoardViewSet,
|
||||
ProjectDeployBoardPublicSettingsEndpoint,
|
||||
ProjectMemberEndpoint,
|
||||
)
|
||||
from .people import (
|
||||
from .user import (
|
||||
UserEndpoint,
|
||||
UpdateUserOnBoardedEndpoint,
|
||||
UpdateUserTourCompletedEndpoint,
|
||||
UserActivityEndpoint,
|
||||
)
|
||||
|
||||
@@ -41,6 +46,12 @@ from .workspace import (
|
||||
UserIssueCompletedGraphEndpoint,
|
||||
UserWorkspaceDashboardEndpoint,
|
||||
WorkspaceThemeViewSet,
|
||||
WorkspaceUserProfileStatsEndpoint,
|
||||
WorkspaceUserActivityEndpoint,
|
||||
WorkspaceUserProfileEndpoint,
|
||||
WorkspaceUserProfileIssuesEndpoint,
|
||||
WorkspaceLabelsEndpoint,
|
||||
WorkspaceMembersEndpoint,
|
||||
)
|
||||
from .state import StateViewSet
|
||||
from .view import IssueViewViewSet, ViewIssuesEndpoint, IssueViewFavoriteViewSet
|
||||
@@ -65,6 +76,14 @@ from .issue import (
|
||||
IssueLinkViewSet,
|
||||
BulkCreateIssueLabelsEndpoint,
|
||||
IssueAttachmentEndpoint,
|
||||
IssueArchiveViewSet,
|
||||
IssueSubscriberViewSet,
|
||||
IssueCommentPublicViewSet,
|
||||
CommentReactionViewSet,
|
||||
IssueReactionViewSet,
|
||||
IssueReactionPublicViewSet,
|
||||
CommentReactionPublicViewSet,
|
||||
IssueVotePublicViewSet,
|
||||
)
|
||||
|
||||
from .auth_extended import (
|
||||
@@ -132,7 +151,8 @@ from .estimate import (
|
||||
|
||||
from .release import ReleaseNotesEndpoint
|
||||
|
||||
from .inbox import InboxViewSet, InboxIssueViewSet
|
||||
from .inbox import InboxViewSet, InboxIssueViewSet, InboxIssuePublicViewSet
|
||||
|
||||
from .analytic import (
|
||||
AnalyticsEndpoint,
|
||||
AnalyticViewViewset,
|
||||
@@ -140,3 +160,9 @@ from .analytic import (
|
||||
ExportAnalyticsEndpoint,
|
||||
DefaultAnalyticsEndpoint,
|
||||
)
|
||||
|
||||
from .notification import NotificationViewSet, UnreadNotificationEndpoint
|
||||
|
||||
from .exporter import (
|
||||
ExportIssuesEndpoint,
|
||||
)
|
||||
@@ -79,12 +79,12 @@ class AnalyticsEndpoint(BaseAPIView):
|
||||
)
|
||||
|
||||
assignee_details = {}
|
||||
if x_axis in ["assignees__email"] or segment in ["assignees__email"]:
|
||||
if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
|
||||
assignee_details = (
|
||||
Issue.issue_objects.filter(workspace__slug=slug, **filters, assignees__avatar__isnull=False)
|
||||
.order_by("assignees__id")
|
||||
.distinct("assignees__id")
|
||||
.values("assignees__avatar", "assignees__email", "assignees__first_name", "assignees__last_name")
|
||||
.values("assignees__avatar", "assignees__display_name", "assignees__first_name", "assignees__last_name", "assignees__id")
|
||||
)
|
||||
|
||||
|
||||
@@ -243,21 +243,21 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
|
||||
)
|
||||
most_issue_created_user = (
|
||||
queryset.exclude(created_by=None)
|
||||
.values("created_by__first_name", "created_by__last_name", "created_by__avatar", "created_by__email")
|
||||
.values("created_by__first_name", "created_by__last_name", "created_by__avatar", "created_by__display_name", "created_by__id")
|
||||
.annotate(count=Count("id"))
|
||||
.order_by("-count")
|
||||
)[:5]
|
||||
|
||||
most_issue_closed_user = (
|
||||
queryset.filter(completed_at__isnull=False, assignees__isnull=False)
|
||||
.values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__email")
|
||||
.values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__display_name", "assignees__id")
|
||||
.annotate(count=Count("id"))
|
||||
.order_by("-count")
|
||||
)[:5]
|
||||
|
||||
pending_issue_user = (
|
||||
queryset.filter(completed_at__isnull=True)
|
||||
.values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__email")
|
||||
.values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__display_name", "assignees__id")
|
||||
.annotate(count=Count("id"))
|
||||
.order_by("-count")
|
||||
)
|
||||
|
||||
@@ -22,7 +22,7 @@ from sentry_sdk import capture_exception
|
||||
|
||||
## Module imports
|
||||
from . import BaseAPIView
|
||||
from plane.api.serializers.people import (
|
||||
from plane.api.serializers import (
|
||||
ChangePasswordSerializer,
|
||||
ResetPasswordSerializer,
|
||||
)
|
||||
|
||||
@@ -279,6 +279,8 @@ class MagicSignInGenerateEndpoint(BaseAPIView):
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Clean up
|
||||
email = email.strip().lower()
|
||||
validate_email(email)
|
||||
|
||||
## Generate a random token
|
||||
@@ -345,8 +347,8 @@ class MagicSignInEndpoint(BaseAPIView):
|
||||
|
||||
def post(self, request):
|
||||
try:
|
||||
user_token = request.data.get("token", "").strip().lower()
|
||||
key = request.data.get("key", False)
|
||||
user_token = request.data.get("token", "").strip()
|
||||
key = request.data.get("key", False).strip().lower()
|
||||
|
||||
if not key or user_token == "":
|
||||
return Response(
|
||||
|
||||
@@ -31,6 +31,7 @@ from plane.api.serializers import (
|
||||
CycleIssueSerializer,
|
||||
CycleFavoriteSerializer,
|
||||
IssueStateSerializer,
|
||||
CycleWriteSerializer,
|
||||
)
|
||||
from plane.api.permissions import ProjectEntityPermission
|
||||
from plane.db.models import (
|
||||
@@ -164,6 +165,9 @@ class CycleViewSet(BaseViewSet):
|
||||
try:
|
||||
queryset = self.get_queryset()
|
||||
cycle_view = request.GET.get("cycle_view", "all")
|
||||
order_by = request.GET.get("order_by", "sort_order")
|
||||
|
||||
queryset = queryset.order_by(order_by)
|
||||
|
||||
# All Cycles
|
||||
if cycle_view == "all":
|
||||
@@ -338,7 +342,7 @@ class CycleViewSet(BaseViewSet):
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
serializer = CycleSerializer(cycle, data=request.data, partial=True)
|
||||
serializer = CycleWriteSerializer(cycle, data=request.data, partial=True)
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
@@ -369,7 +373,8 @@ class CycleViewSet(BaseViewSet):
|
||||
.annotate(last_name=F("assignees__last_name"))
|
||||
.annotate(assignee_id=F("assignees__id"))
|
||||
.annotate(avatar=F("assignees__avatar"))
|
||||
.values("first_name", "last_name", "assignee_id", "avatar")
|
||||
.annotate(display_name=F("assignees__display_name"))
|
||||
.values("first_name", "last_name", "assignee_id", "avatar", "display_name")
|
||||
.annotate(total_issues=Count("assignee_id"))
|
||||
.annotate(
|
||||
completed_issues=Count(
|
||||
@@ -691,7 +696,6 @@ class CycleDateCheckEndpoint(BaseAPIView):
|
||||
return Response(
|
||||
{
|
||||
"error": "You have a cycle already on the given dates, if you want to create your draft cycle you can do that by removing dates",
|
||||
"cycles": CycleSerializer(cycles, many=True).data,
|
||||
"status": False,
|
||||
}
|
||||
)
|
||||
@@ -706,9 +710,6 @@ class CycleDateCheckEndpoint(BaseAPIView):
|
||||
|
||||
|
||||
class CycleFavoriteViewSet(BaseViewSet):
|
||||
permission_classes = [
|
||||
ProjectEntityPermission,
|
||||
]
|
||||
|
||||
serializer_class = CycleFavoriteSerializer
|
||||
model = CycleFavorite
|
||||
|
||||
apiserver/plane/api/views/exporter.py (new file, 100 lines)
@@ -0,0 +1,100 @@
|
||||
# Third Party imports
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
from sentry_sdk import capture_exception
|
||||
|
||||
# Module imports
|
||||
from . import BaseAPIView
|
||||
from plane.api.permissions import WorkSpaceAdminPermission
|
||||
from plane.bgtasks.export_task import issue_export_task
|
||||
from plane.db.models import Project, ExporterHistory, Workspace
|
||||
|
||||
from plane.api.serializers import ExporterHistorySerializer
|
||||
|
||||
|
||||
class ExportIssuesEndpoint(BaseAPIView):
|
||||
permission_classes = [
|
||||
WorkSpaceAdminPermission,
|
||||
]
|
||||
model = ExporterHistory
|
||||
serializer_class = ExporterHistorySerializer
|
||||
|
||||
def post(self, request, slug):
|
||||
try:
|
||||
# Get the workspace
|
||||
workspace = Workspace.objects.get(slug=slug)
|
||||
|
||||
provider = request.data.get("provider", False)
|
||||
multiple = request.data.get("multiple", False)
|
||||
project_ids = request.data.get("project", [])
|
||||
|
||||
if provider in ["csv", "xlsx", "json"]:
|
||||
if not project_ids:
|
||||
project_ids = Project.objects.filter(
|
||||
workspace__slug=slug
|
||||
).values_list("id", flat=True)
|
||||
project_ids = [str(project_id) for project_id in project_ids]
|
||||
|
||||
exporter = ExporterHistory.objects.create(
|
||||
workspace=workspace,
|
||||
project=project_ids,
|
||||
initiated_by=request.user,
|
||||
provider=provider,
|
||||
)
|
||||
|
||||
issue_export_task.delay(
|
||||
provider=exporter.provider,
|
||||
workspace_id=workspace.id,
|
||||
project_ids=project_ids,
|
||||
token_id=exporter.token,
|
||||
multiple=multiple,
|
||||
slug=slug,
|
||||
)
|
||||
return Response(
|
||||
{
|
||||
"message": f"Once the export is ready you will be able to download it"
|
||||
},
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
else:
|
||||
return Response(
|
||||
{"error": f"Provider '{provider}' not found."},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
except Workspace.DoesNotExist:
|
||||
return Response(
|
||||
{"error": "Workspace does not exists"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
def get(self, request, slug):
|
||||
try:
|
||||
exporter_history = ExporterHistory.objects.filter(
|
||||
workspace__slug=slug
|
||||
).select_related("workspace","initiated_by")
|
||||
|
||||
if request.GET.get("per_page", False) and request.GET.get("cursor", False):
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=exporter_history,
|
||||
on_results=lambda exporter_history: ExporterHistorySerializer(
|
||||
exporter_history, many=True
|
||||
).data,
|
||||
)
|
||||
else:
|
||||
return Response(
|
||||
{"error": "per_page and cursor are required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
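For reference, a minimal client-side sketch of the new export endpoint added above. The base URL, API prefix, and auth header are assumptions; the payload keys (`provider`, `project`, `multiple`) and the `per_page`/`cursor` query parameters mirror what `ExportIssuesEndpoint` reads in the view code.

```python
import requests

BASE = "https://plane.example.com/api"          # assumed host and API prefix
HEADERS = {"Authorization": "Bearer <token>"}   # assumed auth scheme

# Kick off a CSV export for selected projects in the workspace
resp = requests.post(
    f"{BASE}/workspaces/my-workspace/export-issues/",
    json={
        "provider": "csv",                       # "csv", "xlsx" or "json" per the view
        "project": ["<project-uuid-1>", "<project-uuid-2>"],
        "multiple": False,
    },
    headers=HEADERS,
)
print(resp.json())  # {"message": "Once the export is ready you will be able to download it"}

# Poll export history; the GET handler requires both per_page and cursor
history = requests.get(
    f"{BASE}/workspaces/my-workspace/export-issues/",
    params={"per_page": 10, "cursor": "10:0:0"},  # cursor format is an assumption about the paginator
    headers=HEADERS,
).json()
```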
@@ -30,31 +30,6 @@ class GPTIntegrationEndpoint(BaseAPIView):
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
count = 0
|
||||
|
||||
# If logger is enabled check for request limit
|
||||
if settings.LOGGER_BASE_URL:
|
||||
try:
|
||||
headers = {
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
|
||||
response = requests.post(
|
||||
settings.LOGGER_BASE_URL,
|
||||
json={"user_id": str(request.user.id)},
|
||||
headers=headers,
|
||||
)
|
||||
count = response.json().get("count", 0)
|
||||
if not response.json().get("success", False):
|
||||
return Response(
|
||||
{
|
||||
"error": "You have surpassed the monthly limit for AI assistance"
|
||||
},
|
||||
status=status.HTTP_429_TOO_MANY_REQUESTS,
|
||||
)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
|
||||
prompt = request.data.get("prompt", False)
|
||||
task = request.data.get("task", False)
|
||||
|
||||
@@ -67,7 +42,7 @@ class GPTIntegrationEndpoint(BaseAPIView):
|
||||
|
||||
openai.api_key = settings.OPENAI_API_KEY
|
||||
response = openai.Completion.create(
|
||||
engine=settings.GPT_ENGINE,
|
||||
model=settings.GPT_ENGINE,
|
||||
prompt=final_text,
|
||||
temperature=0.7,
|
||||
max_tokens=1024,
|
||||
@@ -82,7 +57,6 @@ class GPTIntegrationEndpoint(BaseAPIView):
|
||||
{
|
||||
"response": text,
|
||||
"response_html": text_html,
|
||||
"count": count,
|
||||
"project_detail": ProjectLiteSerializer(project).data,
|
||||
"workspace_detail": WorkspaceLiteSerializer(workspace).data,
|
||||
},
|
||||
|
||||
@@ -332,7 +332,7 @@ class BulkImportIssuesEndpoint(BaseAPIView):
|
||||
# if there is no default state assign any random state
|
||||
if default_state is None:
|
||||
default_state = State.objects.filter(
|
||||
~Q(name="Triage"), sproject_id=project_id
|
||||
~Q(name="Triage"), project_id=project_id
|
||||
).first()
|
||||
|
||||
# Get the maximum sequence_id
|
||||
@@ -458,7 +458,7 @@ class BulkImportIssuesEndpoint(BaseAPIView):
|
||||
actor=request.user,
|
||||
project_id=project_id,
|
||||
workspace_id=project.workspace_id,
|
||||
comment=f"{request.user.email} importer the issue from {service}",
|
||||
comment=f"imported the issue from {service}",
|
||||
verb="created",
|
||||
created_by=request.user,
|
||||
)
|
||||
|
||||
@@ -15,7 +15,6 @@ from sentry_sdk import capture_exception
|
||||
from .base import BaseViewSet
|
||||
from plane.api.permissions import ProjectBasePermission, ProjectLitePermission
|
||||
from plane.db.models import (
|
||||
Project,
|
||||
Inbox,
|
||||
InboxIssue,
|
||||
Issue,
|
||||
@@ -23,6 +22,7 @@ from plane.db.models import (
|
||||
IssueLink,
|
||||
IssueAttachment,
|
||||
ProjectMember,
|
||||
ProjectDeployBoard,
|
||||
)
|
||||
from plane.api.serializers import (
|
||||
IssueSerializer,
|
||||
@@ -377,4 +377,269 @@ class InboxIssueViewSet(BaseViewSet):
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class InboxIssuePublicViewSet(BaseViewSet):
|
||||
serializer_class = InboxIssueSerializer
|
||||
model = InboxIssue
|
||||
|
||||
filterset_fields = [
|
||||
"status",
|
||||
]
|
||||
|
||||
def get_queryset(self):
|
||||
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=self.kwargs.get("slug"), project_id=self.kwargs.get("project_id"))
|
||||
if project_deploy_board is not None:
|
||||
return self.filter_queryset(
|
||||
super()
|
||||
.get_queryset()
|
||||
.filter(
|
||||
Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
|
||||
project_id=self.kwargs.get("project_id"),
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
inbox_id=self.kwargs.get("inbox_id"),
|
||||
)
|
||||
.select_related("issue", "workspace", "project")
|
||||
)
|
||||
else:
|
||||
return InboxIssue.objects.none()
|
||||
|
||||
def list(self, request, slug, project_id, inbox_id):
|
||||
try:
|
||||
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
|
||||
if project_deploy_board.inbox is None:
|
||||
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
filters = issue_filters(request.query_params, "GET")
|
||||
issues = (
|
||||
Issue.objects.filter(
|
||||
issue_inbox__inbox_id=inbox_id,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
.filter(**filters)
|
||||
.annotate(bridge_id=F("issue_inbox__id"))
|
||||
.select_related("workspace", "project", "state", "parent")
|
||||
.prefetch_related("assignees", "labels")
|
||||
.order_by("issue_inbox__snoozed_till", "issue_inbox__status")
|
||||
.annotate(
|
||||
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
attachment_count=IssueAttachment.objects.filter(
|
||||
issue=OuterRef("id")
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"issue_inbox",
|
||||
queryset=InboxIssue.objects.only(
|
||||
"status", "duplicate_to", "snoozed_till", "source"
|
||||
),
|
||||
)
|
||||
)
|
||||
)
|
||||
issues_data = IssueStateInboxSerializer(issues, many=True).data
|
||||
return Response(
|
||||
issues_data,
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
except ProjectDeployBoard.DoesNotExist:
|
||||
return Response({"error": "Project Deploy Board does not exist"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
def create(self, request, slug, project_id, inbox_id):
|
||||
try:
|
||||
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
|
||||
if project_deploy_board.inbox is None:
|
||||
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
if not request.data.get("issue", {}).get("name", False):
|
||||
return Response(
|
||||
{"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
# Check for valid priority
|
||||
if not request.data.get("issue", {}).get("priority", None) in [
|
||||
"low",
|
||||
"medium",
|
||||
"high",
|
||||
"urgent",
|
||||
None,
|
||||
]:
|
||||
return Response(
|
||||
{"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
# Create or get state
|
||||
state, _ = State.objects.get_or_create(
|
||||
name="Triage",
|
||||
group="backlog",
|
||||
description="Default state for managing all Inbox Issues",
|
||||
project_id=project_id,
|
||||
color="#ff7700",
|
||||
)
|
||||
|
||||
# create an issue
|
||||
issue = Issue.objects.create(
|
||||
name=request.data.get("issue", {}).get("name"),
|
||||
description=request.data.get("issue", {}).get("description", {}),
|
||||
description_html=request.data.get("issue", {}).get(
|
||||
"description_html", "<p></p>"
|
||||
),
|
||||
priority=request.data.get("issue", {}).get("priority", "low"),
|
||||
project_id=project_id,
|
||||
state=state,
|
||||
)
|
||||
|
||||
# Create an Issue Activity
|
||||
issue_activity.delay(
|
||||
type="issue.activity.created",
|
||||
requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=str(issue.id),
|
||||
project_id=str(project_id),
|
||||
current_instance=None,
|
||||
)
|
||||
# create an inbox issue
|
||||
InboxIssue.objects.create(
|
||||
inbox_id=inbox_id,
|
||||
project_id=project_id,
|
||||
issue=issue,
|
||||
source=request.data.get("source", "in-app"),
|
||||
)
|
||||
|
||||
serializer = IssueStateInboxSerializer(issue)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
def partial_update(self, request, slug, project_id, inbox_id, pk):
|
||||
try:
|
||||
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
|
||||
if project_deploy_board.inbox is None:
|
||||
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
inbox_issue = InboxIssue.objects.get(
|
||||
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
|
||||
)
|
||||
# Get the project member
|
||||
if str(inbox_issue.created_by_id) != str(request.user.id):
|
||||
return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
# Get issue data
|
||||
issue_data = request.data.pop("issue", False)
|
||||
|
||||
|
||||
issue = Issue.objects.get(
|
||||
pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
|
||||
)
|
||||
# Only name, description and description_html are editable here, since public board creators are viewers/guests
|
||||
issue_data = {
|
||||
"name": issue_data.get("name", issue.name),
|
||||
"description_html": issue_data.get("description_html", issue.description_html),
|
||||
"description": issue_data.get("description", issue.description)
|
||||
}
|
||||
|
||||
issue_serializer = IssueCreateSerializer(
|
||||
issue, data=issue_data, partial=True
|
||||
)
|
||||
|
||||
if issue_serializer.is_valid():
|
||||
current_instance = issue
|
||||
# Log all the updates
|
||||
requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
|
||||
if issue is not None:
|
||||
issue_activity.delay(
|
||||
type="issue.activity.updated",
|
||||
requested_data=requested_data,
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=str(issue.id),
|
||||
project_id=str(project_id),
|
||||
current_instance=json.dumps(
|
||||
IssueSerializer(current_instance).data,
|
||||
cls=DjangoJSONEncoder,
|
||||
),
|
||||
)
|
||||
issue_serializer.save()
|
||||
return Response(issue_serializer.data, status=status.HTTP_200_OK)
|
||||
return Response(issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
except InboxIssue.DoesNotExist:
|
||||
return Response(
|
||||
{"error": "Inbox Issue does not exist"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
def retrieve(self, request, slug, project_id, inbox_id, pk):
|
||||
try:
|
||||
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
|
||||
if project_deploy_board.inbox is None:
|
||||
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
inbox_issue = InboxIssue.objects.get(
|
||||
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
|
||||
)
|
||||
issue = Issue.objects.get(
|
||||
pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
|
||||
)
|
||||
serializer = IssueStateInboxSerializer(issue)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
def destroy(self, request, slug, project_id, inbox_id, pk):
|
||||
try:
|
||||
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
|
||||
if project_deploy_board.inbox is None:
|
||||
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
inbox_issue = InboxIssue.objects.get(
|
||||
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
|
||||
)
|
||||
|
||||
if str(inbox_issue.created_by_id) != str(request.user.id):
|
||||
return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
inbox_issue.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
except InboxIssue.DoesNotExist:
|
||||
return Response({"error": "Inbox Issue does not exists"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
|
||||
@@ -20,6 +20,17 @@ class SlackProjectSyncViewSet(BaseViewSet):
|
||||
serializer_class = SlackProjectSyncSerializer
|
||||
model = SlackProjectSync
|
||||
|
||||
def get_queryset(self):
|
||||
return (
|
||||
super()
|
||||
.get_queryset()
|
||||
.filter(
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
project_id=self.kwargs.get("project_id"),
|
||||
)
|
||||
.filter(project__project_projectmember__member=self.request.user)
|
||||
)
|
||||
|
||||
def create(self, request, slug, project_id, workspace_integration_id):
|
||||
try:
|
||||
serializer = SlackProjectSyncSerializer(data=request.data)
|
||||
@@ -45,7 +56,10 @@ class SlackProjectSyncViewSet(BaseViewSet):
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
except IntegrityError:
|
||||
return Response({"error": "Slack is already enabled for the project"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
return Response(
|
||||
{"error": "Slack is already enabled for the project"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
except WorkspaceIntegration.DoesNotExist:
|
||||
return Response(
|
||||
{"error": "Workspace Integration does not exist"},
|
||||
|
||||
File diff suppressed because it is too large
@@ -53,6 +53,8 @@ class ModuleViewSet(BaseViewSet):
|
||||
)
|
||||
|
||||
def get_queryset(self):
|
||||
order_by = self.request.GET.get("order_by", "sort_order")
|
||||
|
||||
subquery = ModuleFavorite.objects.filter(
|
||||
user=self.request.user,
|
||||
module_id=OuterRef("pk"),
|
||||
@@ -106,7 +108,7 @@ class ModuleViewSet(BaseViewSet):
|
||||
filter=Q(issue_module__issue__state__group="backlog"),
|
||||
)
|
||||
)
|
||||
.order_by("-is_favorite", "name")
|
||||
.order_by(order_by, "name")
|
||||
)
|
||||
|
||||
def perform_destroy(self, instance):
|
||||
@@ -173,8 +175,9 @@ class ModuleViewSet(BaseViewSet):
|
||||
.annotate(first_name=F("assignees__first_name"))
|
||||
.annotate(last_name=F("assignees__last_name"))
|
||||
.annotate(assignee_id=F("assignees__id"))
|
||||
.annotate(display_name=F("assignees__display_name"))
|
||||
.annotate(avatar=F("assignees__avatar"))
|
||||
.values("first_name", "last_name", "assignee_id", "avatar")
|
||||
.values("first_name", "last_name", "assignee_id", "avatar", "display_name")
|
||||
.annotate(total_issues=Count("assignee_id"))
|
||||
.annotate(
|
||||
completed_issues=Count(
|
||||
@@ -480,9 +483,6 @@ class ModuleLinkViewSet(BaseViewSet):
|
||||
|
||||
|
||||
class ModuleFavoriteViewSet(BaseViewSet):
|
||||
permission_classes = [
|
||||
ProjectEntityPermission,
|
||||
]
|
||||
|
||||
serializer_class = ModuleFavoriteSerializer
|
||||
model = ModuleFavorite
|
||||
|
||||
apiserver/plane/api/views/notification.py (new file, 276 lines)
@@ -0,0 +1,276 @@
|
||||
# Django imports
|
||||
from django.db.models import Q
|
||||
from django.utils import timezone
|
||||
|
||||
# Third party imports
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
from sentry_sdk import capture_exception
|
||||
from plane.utils.paginator import BasePaginator
|
||||
|
||||
# Module imports
|
||||
from .base import BaseViewSet, BaseAPIView
|
||||
from plane.db.models import Notification, IssueAssignee, IssueSubscriber, Issue, WorkspaceMember
|
||||
from plane.api.serializers import NotificationSerializer
|
||||
|
||||
|
||||
class NotificationViewSet(BaseViewSet, BasePaginator):
|
||||
model = Notification
|
||||
serializer_class = NotificationSerializer
|
||||
|
||||
def get_queryset(self):
|
||||
return (
|
||||
super()
|
||||
.get_queryset()
|
||||
.filter(
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
receiver_id=self.request.user.id,
|
||||
)
|
||||
.select_related("workspace", "project," "triggered_by", "receiver")
|
||||
)
|
||||
|
||||
def list(self, request, slug):
|
||||
try:
|
||||
snoozed = request.GET.get("snoozed", "false")
|
||||
archived = request.GET.get("archived", "false")
|
||||
read = request.GET.get("read", "true")
|
||||
|
||||
# Filter type
|
||||
type = request.GET.get("type", "all")
|
||||
|
||||
notifications = (
|
||||
Notification.objects.filter(
|
||||
workspace__slug=slug, receiver_id=request.user.id
|
||||
)
|
||||
.select_related("workspace", "project", "triggered_by", "receiver")
|
||||
.order_by("snoozed_till", "-created_at")
|
||||
)
|
||||
|
||||
# Filter for snoozed notifications
|
||||
if snoozed == "false":
|
||||
notifications = notifications.filter(
|
||||
Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
|
||||
)
|
||||
|
||||
if snoozed == "true":
|
||||
notifications = notifications.filter(
|
||||
Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False)
|
||||
)
|
||||
|
||||
if read == "false":
|
||||
notifications = notifications.filter(read_at__isnull=True)
|
||||
|
||||
# Filter for archived or unarchived notifications
|
||||
if archived == "false":
|
||||
notifications = notifications.filter(archived_at__isnull=True)
|
||||
|
||||
if archived == "true":
|
||||
notifications = notifications.filter(archived_at__isnull=False)
|
||||
|
||||
# Subscribed issues
|
||||
if type == "watching":
|
||||
issue_ids = IssueSubscriber.objects.filter(
|
||||
workspace__slug=slug, subscriber_id=request.user.id
|
||||
).values_list("issue_id", flat=True)
|
||||
notifications = notifications.filter(entity_identifier__in=issue_ids)
|
||||
|
||||
# Assigned Issues
|
||||
if type == "assigned":
|
||||
issue_ids = IssueAssignee.objects.filter(
|
||||
workspace__slug=slug, assignee_id=request.user.id
|
||||
).values_list("issue_id", flat=True)
|
||||
notifications = notifications.filter(entity_identifier__in=issue_ids)
|
||||
|
||||
# Created issues
|
||||
if type == "created":
|
||||
if WorkspaceMember.objects.filter(workspace__slug=slug, member=request.user, role__lt=15).exists():
|
||||
notifications = Notification.objects.none()
|
||||
else:
|
||||
issue_ids = Issue.objects.filter(
|
||||
workspace__slug=slug, created_by=request.user
|
||||
).values_list("pk", flat=True)
|
||||
notifications = notifications.filter(entity_identifier__in=issue_ids)
|
||||
|
||||
# Pagination
|
||||
if request.GET.get("per_page", False) and request.GET.get("cursor", False):
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=(notifications),
|
||||
on_results=lambda notifications: NotificationSerializer(
|
||||
notifications, many=True
|
||||
).data,
|
||||
)
|
||||
|
||||
serializer = NotificationSerializer(notifications, many=True)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
def partial_update(self, request, slug, pk):
|
||||
try:
|
||||
notification = Notification.objects.get(
|
||||
workspace__slug=slug, pk=pk, receiver=request.user
|
||||
)
|
||||
# Only read_at and snoozed_till can be updated
|
||||
notification_data = {
|
||||
"snoozed_till": request.data.get("snoozed_till", None),
|
||||
}
|
||||
serializer = NotificationSerializer(
|
||||
notification, data=notification_data, partial=True
|
||||
)
|
||||
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
except Notification.DoesNotExist:
|
||||
return Response(
|
||||
{"error": "Notification does not exists"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
def mark_read(self, request, slug, pk):
|
||||
try:
|
||||
notification = Notification.objects.get(
|
||||
receiver=request.user, workspace__slug=slug, pk=pk
|
||||
)
|
||||
notification.read_at = timezone.now()
|
||||
notification.save()
|
||||
serializer = NotificationSerializer(notification)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
except Notification.DoesNotExist:
|
||||
return Response(
|
||||
{"error": "Notification does not exists"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
def mark_unread(self, request, slug, pk):
|
||||
try:
|
||||
notification = Notification.objects.get(
|
||||
receiver=request.user, workspace__slug=slug, pk=pk
|
||||
)
|
||||
notification.read_at = None
|
||||
notification.save()
|
||||
serializer = NotificationSerializer(notification)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
except Notification.DoesNotExist:
|
||||
return Response(
|
||||
{"error": "Notification does not exists"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
def archive(self, request, slug, pk):
|
||||
try:
|
||||
notification = Notification.objects.get(
|
||||
receiver=request.user, workspace__slug=slug, pk=pk
|
||||
)
|
||||
notification.archived_at = timezone.now()
|
||||
notification.save()
|
||||
serializer = NotificationSerializer(notification)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
except Notification.DoesNotExist:
|
||||
return Response(
|
||||
{"error": "Notification does not exists"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
def unarchive(self, request, slug, pk):
|
||||
try:
|
||||
notification = Notification.objects.get(
|
||||
receiver=request.user, workspace__slug=slug, pk=pk
|
||||
)
|
||||
notification.archived_at = None
|
||||
notification.save()
|
||||
serializer = NotificationSerializer(notification)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
except Notification.DoesNotExist:
|
||||
return Response(
|
||||
{"error": "Notification does not exists"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
|
||||
class UnreadNotificationEndpoint(BaseAPIView):
|
||||
def get(self, request, slug):
|
||||
try:
|
||||
# Watching Issues Count
|
||||
watching_issues_count = Notification.objects.filter(
|
||||
workspace__slug=slug,
|
||||
receiver_id=request.user.id,
|
||||
read_at__isnull=True,
|
||||
archived_at__isnull=True,
|
||||
entity_identifier__in=IssueSubscriber.objects.filter(
|
||||
workspace__slug=slug, subscriber_id=request.user.id
|
||||
).values_list("issue_id", flat=True),
|
||||
).count()
|
||||
|
||||
# My Issues Count
|
||||
my_issues_count = Notification.objects.filter(
|
||||
workspace__slug=slug,
|
||||
receiver_id=request.user.id,
|
||||
read_at__isnull=True,
|
||||
archived_at__isnull=True,
|
||||
entity_identifier__in=IssueAssignee.objects.filter(
|
||||
workspace__slug=slug, assignee_id=request.user.id
|
||||
).values_list("issue_id", flat=True),
|
||||
).count()
|
||||
|
||||
# Created Issues Count
|
||||
created_issues_count = Notification.objects.filter(
|
||||
workspace__slug=slug,
|
||||
receiver_id=request.user.id,
|
||||
read_at__isnull=True,
|
||||
archived_at__isnull=True,
|
||||
entity_identifier__in=Issue.objects.filter(
|
||||
workspace__slug=slug, created_by=request.user
|
||||
).values_list("pk", flat=True),
|
||||
).count()
|
||||
|
||||
return Response(
|
||||
{
|
||||
"watching_issues": watching_issues_count,
|
||||
"my_issues": my_issues_count,
|
||||
"created_issues": created_issues_count,
|
||||
},
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
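A hedged sketch of how a client might exercise the notification routes wired above: list with the query filters read in `NotificationViewSet.list()` (`read`, `archived`, `snoozed`, `type`), then mark one notification read via the `read/` route. Host, API prefix and auth are assumptions; the response is assumed to expose `id` through the serializer.

```python
import requests

BASE = "https://plane.example.com/api"          # assumed host and API prefix
HEADERS = {"Authorization": "Bearer <token>"}   # assumed auth scheme
slug = "my-workspace"

# Unread, unarchived notifications for issues the user is watching
notifications = requests.get(
    f"{BASE}/workspaces/{slug}/users/notifications/",
    params={"read": "false", "archived": "false", "snoozed": "false", "type": "watching"},
    headers=HEADERS,
).json()

# Mark the first one as read (POST -> mark_read, DELETE -> mark_unread per the URL mapping)
if notifications:
    pk = notifications[0]["id"]  # assumes the serializer exposes "id"
    requests.post(f"{BASE}/workspaces/{slug}/users/notifications/{pk}/read/", headers=HEADERS)
```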
@@ -301,7 +301,7 @@ class CreateIssueFromPageBlockEndpoint(BaseAPIView):
|
||||
issue=issue,
|
||||
actor=request.user,
|
||||
project_id=project_id,
|
||||
comment=f"{request.user.email} created the issue from {page_block.name} block",
|
||||
comment=f"created the issue from {page_block.name} block",
|
||||
verb="created",
|
||||
)
|
||||
|
||||
|
||||
@@ -5,7 +5,21 @@ from datetime import datetime
|
||||
# Django imports
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db import IntegrityError
|
||||
from django.db.models import Q, Exists, OuterRef, Func, F
|
||||
from django.db.models import (
|
||||
Q,
|
||||
Exists,
|
||||
OuterRef,
|
||||
Func,
|
||||
F,
|
||||
Max,
|
||||
CharField,
|
||||
Func,
|
||||
Subquery,
|
||||
Prefetch,
|
||||
When,
|
||||
Case,
|
||||
Value,
|
||||
)
|
||||
from django.core.validators import validate_email
|
||||
from django.conf import settings
|
||||
|
||||
@@ -13,6 +27,7 @@ from django.conf import settings
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
from rest_framework import serializers
|
||||
from rest_framework.permissions import AllowAny
|
||||
from sentry_sdk import capture_exception
|
||||
|
||||
# Module imports
|
||||
@@ -23,9 +38,16 @@ from plane.api.serializers import (
|
||||
ProjectDetailSerializer,
|
||||
ProjectMemberInviteSerializer,
|
||||
ProjectFavoriteSerializer,
|
||||
IssueLiteSerializer,
|
||||
ProjectDeployBoardSerializer,
|
||||
ProjectMemberAdminSerializer,
|
||||
)
|
||||
|
||||
from plane.api.permissions import ProjectBasePermission
|
||||
from plane.api.permissions import (
|
||||
ProjectBasePermission,
|
||||
ProjectEntityPermission,
|
||||
ProjectMemberPermission,
|
||||
)
|
||||
|
||||
from plane.db.models import (
|
||||
Project,
|
||||
@@ -48,9 +70,17 @@ from plane.db.models import (
|
||||
IssueAssignee,
|
||||
ModuleMember,
|
||||
Inbox,
|
||||
ProjectDeployBoard,
|
||||
Issue,
|
||||
IssueReaction,
|
||||
IssueLink,
|
||||
IssueAttachment,
|
||||
Label,
|
||||
)
|
||||
|
||||
from plane.bgtasks.project_invitation_task import project_invitation
|
||||
from plane.utils.grouper import group_results
|
||||
from plane.utils.issue_filters import issue_filters
|
||||
|
||||
|
||||
class ProjectViewSet(BaseViewSet):
|
||||
@@ -91,20 +121,61 @@ class ProjectViewSet(BaseViewSet):
|
||||
)
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
total_members=ProjectMember.objects.filter(
|
||||
project_id=OuterRef("id"), member__is_bot=False
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
total_cycles=Cycle.objects.filter(project_id=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
total_modules=Module.objects.filter(project_id=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
member_role=ProjectMember.objects.filter(
|
||||
project_id=OuterRef("pk"),
|
||||
member_id=self.request.user.id,
|
||||
).values("role")
|
||||
)
|
||||
.annotate(
|
||||
is_deployed=Exists(
|
||||
ProjectDeployBoard.objects.filter(
|
||||
project_id=OuterRef("pk"),
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
)
|
||||
)
|
||||
)
|
||||
.distinct()
|
||||
)
|
||||
|
||||
def list(self, request, slug):
|
||||
try:
|
||||
is_favorite = request.GET.get("is_favorite", "all")
|
||||
subquery = ProjectFavorite.objects.filter(
|
||||
user=self.request.user,
|
||||
project_id=OuterRef("pk"),
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
)
|
||||
sort_order_query = ProjectMember.objects.filter(
|
||||
member=request.user,
|
||||
project_id=OuterRef("pk"),
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
).values("sort_order")
|
||||
projects = (
|
||||
self.get_queryset()
|
||||
.annotate(is_favorite=Exists(subquery))
|
||||
.order_by("-is_favorite", "name")
|
||||
.annotate(sort_order=Subquery(sort_order_query))
|
||||
.order_by("sort_order", "name")
|
||||
.annotate(
|
||||
total_members=ProjectMember.objects.filter(
|
||||
project_id=OuterRef("id")
|
||||
@@ -126,6 +197,12 @@ class ProjectViewSet(BaseViewSet):
|
||||
.values("count")
|
||||
)
|
||||
)
|
||||
|
||||
if is_favorite == "true":
|
||||
projects = projects.filter(is_favorite=True)
|
||||
if is_favorite == "false":
|
||||
projects = projects.filter(is_favorite=False)
|
||||
|
||||
return Response(ProjectDetailSerializer(projects, many=True).data)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
@@ -145,40 +222,49 @@ class ProjectViewSet(BaseViewSet):
|
||||
serializer.save()
|
||||
|
||||
# Add the user as Administrator to the project
|
||||
ProjectMember.objects.create(
|
||||
project_member = ProjectMember.objects.create(
|
||||
project_id=serializer.data["id"], member=request.user, role=20
|
||||
)
|
||||
|
||||
if serializer.data["project_lead"] is not None and str(
|
||||
serializer.data["project_lead"]
|
||||
) != str(request.user.id):
|
||||
ProjectMember.objects.create(
|
||||
project_id=serializer.data["id"],
|
||||
member_id=serializer.data["project_lead"],
|
||||
role=20,
|
||||
)
|
||||
|
||||
# Default states
|
||||
states = [
|
||||
{
|
||||
"name": "Backlog",
|
||||
"color": "#5e6ad2",
|
||||
"color": "#A3A3A3",
|
||||
"sequence": 15000,
|
||||
"group": "backlog",
|
||||
"default": True,
|
||||
},
|
||||
{
|
||||
"name": "Todo",
|
||||
"color": "#eb5757",
|
||||
"color": "#3A3A3A",
|
||||
"sequence": 25000,
|
||||
"group": "unstarted",
|
||||
},
|
||||
{
|
||||
"name": "In Progress",
|
||||
"color": "#26b5ce",
|
||||
"color": "#F59E0B",
|
||||
"sequence": 35000,
|
||||
"group": "started",
|
||||
},
|
||||
{
|
||||
"name": "Done",
|
||||
"color": "#f2c94c",
|
||||
"color": "#16A34A",
|
||||
"sequence": 45000,
|
||||
"group": "completed",
|
||||
},
|
||||
{
|
||||
"name": "Cancelled",
|
||||
"color": "#4cb782",
|
||||
"color": "#EF4444",
|
||||
"sequence": 55000,
|
||||
"group": "cancelled",
|
||||
},
|
||||
@@ -200,9 +286,11 @@ class ProjectViewSet(BaseViewSet):
|
||||
]
|
||||
)
|
||||
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
data = serializer.data
|
||||
data["sort_order"] = project_member.sort_order
|
||||
return Response(data, status=status.HTTP_201_CREATED)
|
||||
return Response(
|
||||
[serializer.errors[error][0] for error in serializer.errors],
|
||||
serializer.errors,
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
except IntegrityError as e:
|
||||
@@ -259,7 +347,7 @@ class ProjectViewSet(BaseViewSet):
|
||||
group="backlog",
|
||||
description="Default state for managing all Inbox Issues",
|
||||
project_id=pk,
|
||||
color="#ff7700"
|
||||
color="#ff7700",
|
||||
)
|
||||
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
@@ -307,7 +395,9 @@ class InviteProjectEndpoint(BaseAPIView):
|
||||
validate_email(email)
|
||||
# Check if user is already a member of workspace
|
||||
if ProjectMember.objects.filter(
|
||||
project_id=project_id, member__email=email
|
||||
project_id=project_id,
|
||||
member__email=email,
|
||||
member__is_bot=False,
|
||||
).exists():
|
||||
return Response(
|
||||
{"error": "User is already member of workspace"},
|
||||
@@ -411,14 +501,14 @@ class UserProjectInvitationsViewset(BaseViewSet):
|
||||
|
||||
|
||||
class ProjectMemberViewSet(BaseViewSet):
|
||||
serializer_class = ProjectMemberSerializer
|
||||
serializer_class = ProjectMemberAdminSerializer
|
||||
model = ProjectMember
|
||||
permission_classes = [
|
||||
ProjectBasePermission,
|
||||
]
|
||||
|
||||
search_fields = [
|
||||
"member__email",
|
||||
"member__display_name",
|
||||
"member__first_name",
|
||||
]
|
||||
|
||||
@@ -550,45 +640,66 @@ class AddMemberToProjectEndpoint(BaseAPIView):
|
||||
|
||||
def post(self, request, slug, project_id):
|
||||
try:
|
||||
member_id = request.data.get("member_id", False)
|
||||
role = request.data.get("role", False)
|
||||
members = request.data.get("members", [])
|
||||
|
||||
if not member_id or not role:
|
||||
# get the project
|
||||
project = Project.objects.get(pk=project_id, workspace__slug=slug)
|
||||
|
||||
if not len(members):
|
||||
return Response(
|
||||
{"error": "Member ID and role is required"},
|
||||
{"error": "Atleast one member is required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
bulk_project_members = []
|
||||
|
||||
# Check if the user is a member in the workspace
|
||||
if not WorkspaceMember.objects.filter(
|
||||
workspace__slug=slug, member_id=member_id
|
||||
).exists():
|
||||
# TODO: Update this error message - nk
|
||||
return Response(
|
||||
{
|
||||
"error": "User is not a member of the workspace. Invite the user to the workspace to add him to project"
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
project_members = (
|
||||
ProjectMember.objects.filter(
|
||||
workspace__slug=slug,
|
||||
member_id__in=[member.get("member_id") for member in members],
|
||||
)
|
||||
|
||||
# Check if the user is already member of project
|
||||
if ProjectMember.objects.filter(
|
||||
project=project_id, member_id=member_id
|
||||
).exists():
|
||||
return Response(
|
||||
{"error": "User is already a member of the project"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Add the user to project
|
||||
project_member = ProjectMember.objects.create(
|
||||
project_id=project_id, member_id=member_id, role=role
|
||||
.values("member_id", "sort_order")
|
||||
.order_by("sort_order")
|
||||
)
|
||||
|
||||
serializer = ProjectMemberSerializer(project_member)
|
||||
for member in members:
|
||||
sort_order = [
|
||||
project_member.get("sort_order")
|
||||
for project_member in project_members
|
||||
if str(project_member.get("member_id"))
|
||||
== str(member.get("member_id"))
|
||||
]
|
||||
bulk_project_members.append(
|
||||
ProjectMember(
|
||||
member_id=member.get("member_id"),
|
||||
role=member.get("role", 10),
|
||||
project_id=project_id,
|
||||
workspace_id=project.workspace_id,
|
||||
sort_order=sort_order[0] - 10000 if len(sort_order) else 65535,
|
||||
)
|
||||
)
|
||||
|
||||
project_members = ProjectMember.objects.bulk_create(
|
||||
bulk_project_members,
|
||||
batch_size=10,
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
|
||||
serializer = ProjectMemberSerializer(project_members, many=True)
|
||||
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
|
||||
except KeyError:
|
||||
return Response(
|
||||
{"error": "Incorrect data sent"}, status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
except Project.DoesNotExist:
|
||||
return Response(
|
||||
{"error": "Project does not exist"}, status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
except IntegrityError:
|
||||
return Response(
|
||||
{"error": "User not member of the workspace"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
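The hunk above changes `AddMemberToProjectEndpoint` from a single `member_id`/`role` payload to a bulk `members` list. A request sketch under the same hypothetical host/auth assumptions; the payload keys and the fallback role of 10 come from the view code above.

```python
import requests

BASE = "https://plane.example.com/api"          # assumed host and API prefix
HEADERS = {"Authorization": "Bearer <token>"}   # assumed auth scheme

resp = requests.post(
    f"{BASE}/workspaces/my-workspace/projects/<project-uuid>/members/add/",
    json={
        "members": [
            {"member_id": "<user-uuid-1>", "role": 15},
            {"member_id": "<user-uuid-2>"},   # role omitted: falls back to 10, per the view
        ]
    },
    headers=HEADERS,
)
resp.raise_for_status()
```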
@@ -810,11 +921,15 @@ class ProjectUserViewsEndpoint(BaseAPIView):
|
||||
|
||||
view_props = project_member.view_props
|
||||
default_props = project_member.default_props
|
||||
preferences = project_member.preferences
|
||||
sort_order = project_member.sort_order
|
||||
|
||||
project_member.view_props = request.data.get("view_props", view_props)
|
||||
project_member.default_props = request.data.get(
|
||||
"default_props", default_props
|
||||
)
|
||||
project_member.preferences = request.data.get("preferences", preferences)
|
||||
project_member.sort_order = request.data.get("sort_order", sort_order)
|
||||
|
||||
project_member.save()
|
||||
|
||||
@@ -919,3 +1034,255 @@ class ProjectFavoritesViewSet(BaseViewSet):
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
|
||||
class ProjectDeployBoardViewSet(BaseViewSet):
|
||||
permission_classes = [
|
||||
ProjectMemberPermission,
|
||||
]
|
||||
serializer_class = ProjectDeployBoardSerializer
|
||||
model = ProjectDeployBoard
|
||||
|
||||
def get_queryset(self):
|
||||
return (
|
||||
super()
|
||||
.get_queryset()
|
||||
.filter(
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
project_id=self.kwargs.get("project_id"),
|
||||
)
|
||||
.select_related("project")
|
||||
)
|
||||
|
||||
def create(self, request, slug, project_id):
|
||||
try:
|
||||
comments = request.data.get("comments", False)
|
||||
reactions = request.data.get("reactions", False)
|
||||
inbox = request.data.get("inbox", None)
|
||||
votes = request.data.get("votes", False)
|
||||
views = request.data.get(
|
||||
"views",
|
||||
{
|
||||
"list": True,
|
||||
"kanban": True,
|
||||
"calendar": True,
|
||||
"gantt": True,
|
||||
"spreadsheet": True,
|
||||
},
|
||||
)
|
||||
|
||||
project_deploy_board, _ = ProjectDeployBoard.objects.get_or_create(
|
||||
anchor=f"{slug}/{project_id}",
|
||||
project_id=project_id,
|
||||
)
|
||||
project_deploy_board.comments = comments
|
||||
project_deploy_board.reactions = reactions
|
||||
project_deploy_board.inbox = inbox
|
||||
project_deploy_board.votes = votes
|
||||
project_deploy_board.views = views
|
||||
|
||||
project_deploy_board.save()
|
||||
|
||||
serializer = ProjectDeployBoardSerializer(project_deploy_board)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
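A sketch of the settings payload that `ProjectDeployBoardViewSet.create` above reads from the request; the field defaults mirror the view itself, while the host, URL prefix and auth header are assumptions.

```python
import requests

BASE = "https://plane.example.com/api"          # assumed host and API prefix
HEADERS = {"Authorization": "Bearer <token>"}   # assumed auth scheme

requests.post(
    f"{BASE}/workspaces/my-workspace/projects/<project-uuid>/project-deploy-boards/",
    json={
        "comments": True,
        "reactions": True,
        "votes": True,
        "inbox": None,   # pass an inbox id to enable the public inbox, per the view
        "views": {"list": True, "kanban": True, "calendar": False,
                  "gantt": False, "spreadsheet": False},
    },
    headers=HEADERS,
)
```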
|
||||
|
||||
class ProjectMemberEndpoint(BaseAPIView):
|
||||
permission_classes = [
|
||||
ProjectEntityPermission,
|
||||
]
|
||||
|
||||
def get(self, request, slug, project_id):
|
||||
try:
|
||||
project_members = ProjectMember.objects.filter(
|
||||
project_id=project_id,
|
||||
workspace__slug=slug,
|
||||
member__is_bot=False,
|
||||
).select_related("project", "member")
|
||||
serializer = ProjectMemberSerializer(project_members, many=True)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
|
||||
class ProjectDeployBoardPublicSettingsEndpoint(BaseAPIView):
|
||||
permission_classes = [
|
||||
AllowAny,
|
||||
]
|
||||
|
||||
def get(self, request, slug, project_id):
|
||||
try:
|
||||
project_deploy_board = ProjectDeployBoard.objects.get(
|
||||
workspace__slug=slug, project_id=project_id
|
||||
)
|
||||
serializer = ProjectDeployBoardSerializer(project_deploy_board)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
except ProjectDeployBoard.DoesNotExist:
return Response(
{"error": "Project Deploy Board does not exist"},
status=status.HTTP_404_NOT_FOUND,
)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
|
||||
class ProjectDeployBoardIssuesPublicEndpoint(BaseAPIView):
|
||||
permission_classes = [
|
||||
AllowAny,
|
||||
]
|
||||
|
||||
def get(self, request, slug, project_id):
|
||||
try:
|
||||
project_deploy_board = ProjectDeployBoard.objects.get(
|
||||
workspace__slug=slug, project_id=project_id
|
||||
)
|
||||
|
||||
filters = issue_filters(request.query_params, "GET")
|
||||
|
||||
# Custom ordering for priority and state
|
||||
priority_order = ["urgent", "high", "medium", "low", None]
|
||||
state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
|
||||
|
||||
order_by_param = request.GET.get("order_by", "-created_at")
|
||||
|
||||
issue_queryset = (
|
||||
Issue.issue_objects.annotate(
|
||||
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.filter(project_id=project_id)
|
||||
.filter(workspace__slug=slug)
|
||||
.select_related("project", "workspace", "state", "parent")
|
||||
.prefetch_related("assignees", "labels")
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"issue_reactions",
|
||||
queryset=IssueReaction.objects.select_related("actor"),
|
||||
)
|
||||
)
|
||||
.filter(**filters)
|
||||
.annotate(cycle_id=F("issue_cycle__cycle_id"))
|
||||
.annotate(module_id=F("issue_module__module_id"))
|
||||
.annotate(
|
||||
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
attachment_count=IssueAttachment.objects.filter(
|
||||
issue=OuterRef("id")
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
)
|
||||
|
||||
# Priority Ordering
|
||||
if order_by_param == "priority" or order_by_param == "-priority":
|
||||
priority_order = (
|
||||
priority_order
|
||||
if order_by_param == "priority"
|
||||
else priority_order[::-1]
|
||||
)
|
||||
issue_queryset = issue_queryset.annotate(
|
||||
priority_order=Case(
|
||||
*[
|
||||
When(priority=p, then=Value(i))
|
||||
for i, p in enumerate(priority_order)
|
||||
],
|
||||
output_field=CharField(),
|
||||
)
|
||||
).order_by("priority_order")
|
||||
|
||||
# State Ordering
|
||||
elif order_by_param in [
|
||||
"state__name",
|
||||
"state__group",
|
||||
"-state__name",
|
||||
"-state__group",
|
||||
]:
|
||||
state_order = (
|
||||
state_order
|
||||
if order_by_param in ["state__name", "state__group"]
|
||||
else state_order[::-1]
|
||||
)
|
||||
issue_queryset = issue_queryset.annotate(
|
||||
state_order=Case(
|
||||
*[
|
||||
When(state__group=state_group, then=Value(i))
|
||||
for i, state_group in enumerate(state_order)
|
||||
],
|
||||
default=Value(len(state_order)),
|
||||
output_field=CharField(),
|
||||
)
|
||||
).order_by("state_order")
|
||||
# assignee and label ordering
|
||||
elif order_by_param in [
|
||||
"labels__name",
|
||||
"-labels__name",
|
||||
"assignees__first_name",
|
||||
"-assignees__first_name",
|
||||
]:
|
||||
issue_queryset = issue_queryset.annotate(
|
||||
max_values=Max(
|
||||
order_by_param[1::]
|
||||
if order_by_param.startswith("-")
|
||||
else order_by_param
|
||||
)
|
||||
).order_by(
|
||||
"-max_values" if order_by_param.startswith("-") else "max_values"
|
||||
)
|
||||
else:
|
||||
issue_queryset = issue_queryset.order_by(order_by_param)
|
||||
|
||||
issues = IssueLiteSerializer(issue_queryset, many=True).data
|
||||
|
||||
states = State.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id
|
||||
).values("name", "group", "color", "id")
|
||||
|
||||
labels = Label.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id
|
||||
).values("id", "name", "color", "parent")
|
||||
|
||||
## Grouping the results
|
||||
group_by = request.GET.get("group_by", False)
|
||||
if group_by:
|
||||
issues = group_results(issues, group_by)
|
||||
|
||||
return Response(
|
||||
{
|
||||
"issues": issues,
|
||||
"states": states,
|
||||
"labels": labels,
|
||||
},
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
except ProjectDeployBoard.DoesNotExist:
return Response(
{"error": "Board does not exist"}, status=status.HTTP_404_NOT_FOUND
)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
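The priority and state ordering above works by annotating each issue with the index of its value in an explicit list (Case(When(priority=p, then=Value(i)))) and then sorting on that annotation. The same idea as a plain, runnable Python sketch with made-up issues (illustrative only):

# Sort by the position of each value in a fixed priority list; None sorts last.
priority_order = ["urgent", "high", "medium", "low", None]

issues = [
    {"name": "A", "priority": "low"},
    {"name": "B", "priority": None},
    {"name": "C", "priority": "urgent"},
]

def priority_rank(issue):
    p = issue.get("priority")
    return priority_order.index(p) if p in priority_order else len(priority_order)

print(sorted(issues, key=priority_rank))  # C (urgent), then A (low), then B (None)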
|
||||
|
||||
@@ -20,7 +20,7 @@ class GlobalSearchEndpoint(BaseAPIView):
|
||||
also show related workspace if found
|
||||
"""
|
||||
|
||||
def filter_workspaces(self, query, slug, project_id):
|
||||
def filter_workspaces(self, query, slug, project_id, workspace_search):
|
||||
fields = ["name"]
|
||||
q = Q()
|
||||
for field in fields:
|
||||
@@ -31,8 +31,8 @@ class GlobalSearchEndpoint(BaseAPIView):
|
||||
.values("name", "id", "slug")
|
||||
)
|
||||
|
||||
def filter_projects(self, query, slug, project_id):
|
||||
fields = ["name"]
|
||||
def filter_projects(self, query, slug, project_id, workspace_search):
|
||||
fields = ["name", "identifier"]
|
||||
q = Q()
|
||||
for field in fields:
|
||||
q |= Q(**{f"{field}__icontains": query})
|
||||
@@ -46,8 +46,8 @@ class GlobalSearchEndpoint(BaseAPIView):
|
||||
.values("name", "id", "identifier", "workspace__slug")
|
||||
)
|
||||
|
||||
def filter_issues(self, query, slug, project_id):
|
||||
fields = ["name", "sequence_id"]
|
||||
def filter_issues(self, query, slug, project_id, workspace_search):
|
||||
fields = ["name", "sequence_id", "project__identifier"]
|
||||
q = Q()
|
||||
for field in fields:
|
||||
if field == "sequence_id":
|
||||
@@ -56,111 +56,123 @@ class GlobalSearchEndpoint(BaseAPIView):
|
||||
q |= Q(**{"sequence_id": sequence_id})
|
||||
else:
|
||||
q |= Q(**{f"{field}__icontains": query})
|
||||
return (
|
||||
Issue.issue_objects.filter(
|
||||
q,
|
||||
project__project_projectmember__member=self.request.user,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
.distinct()
|
||||
.values(
|
||||
"name",
|
||||
"id",
|
||||
"sequence_id",
|
||||
"project__identifier",
|
||||
"project_id",
|
||||
"workspace__slug",
|
||||
)
|
||||
|
||||
issues = Issue.issue_objects.filter(
|
||||
q,
|
||||
project__project_projectmember__member=self.request.user,
|
||||
workspace__slug=slug,
|
||||
)
|
||||
|
||||
def filter_cycles(self, query, slug, project_id):
|
||||
if workspace_search == "false" and project_id:
|
||||
issues = issues.filter(project_id=project_id)
|
||||
|
||||
return issues.distinct().values(
|
||||
"name",
|
||||
"id",
|
||||
"sequence_id",
|
||||
"project__identifier",
|
||||
"project_id",
|
||||
"workspace__slug",
|
||||
)
|
||||
|
||||
def filter_cycles(self, query, slug, project_id, workspace_search):
|
||||
fields = ["name"]
|
||||
q = Q()
|
||||
for field in fields:
|
||||
q |= Q(**{f"{field}__icontains": query})
|
||||
return (
|
||||
Cycle.objects.filter(
|
||||
q,
|
||||
project__project_projectmember__member=self.request.user,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
.distinct()
|
||||
.values(
|
||||
"name",
|
||||
"id",
|
||||
"project_id",
|
||||
"workspace__slug",
|
||||
)
|
||||
|
||||
cycles = Cycle.objects.filter(
|
||||
q,
|
||||
project__project_projectmember__member=self.request.user,
|
||||
workspace__slug=slug,
|
||||
)
|
||||
|
||||
def filter_modules(self, query, slug, project_id):
|
||||
if workspace_search == "false" and project_id:
|
||||
cycles = cycles.filter(project_id=project_id)
|
||||
|
||||
return cycles.distinct().values(
|
||||
"name",
|
||||
"id",
|
||||
"project_id",
|
||||
"project__identifier",
|
||||
"workspace__slug",
|
||||
)
|
||||
|
||||
def filter_modules(self, query, slug, project_id, workspace_search):
|
||||
fields = ["name"]
|
||||
q = Q()
|
||||
for field in fields:
|
||||
q |= Q(**{f"{field}__icontains": query})
|
||||
return (
|
||||
Module.objects.filter(
|
||||
q,
|
||||
project__project_projectmember__member=self.request.user,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
.distinct()
|
||||
.values(
|
||||
"name",
|
||||
"id",
|
||||
"project_id",
|
||||
"workspace__slug",
|
||||
)
|
||||
|
||||
modules = Module.objects.filter(
|
||||
q,
|
||||
project__project_projectmember__member=self.request.user,
|
||||
workspace__slug=slug,
|
||||
)
|
||||
|
||||
def filter_pages(self, query, slug, project_id):
|
||||
if workspace_search == "false" and project_id:
|
||||
modules = modules.filter(project_id=project_id)
|
||||
|
||||
return modules.distinct().values(
|
||||
"name",
|
||||
"id",
|
||||
"project_id",
|
||||
"project__identifier",
|
||||
"workspace__slug",
|
||||
)
|
||||
|
||||
def filter_pages(self, query, slug, project_id, workspace_search):
|
||||
fields = ["name"]
|
||||
q = Q()
|
||||
for field in fields:
|
||||
q |= Q(**{f"{field}__icontains": query})
|
||||
return (
|
||||
Page.objects.filter(
|
||||
q,
|
||||
project__project_projectmember__member=self.request.user,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
.distinct()
|
||||
.values(
|
||||
"name",
|
||||
"id",
|
||||
"project_id",
|
||||
"workspace__slug",
|
||||
)
|
||||
|
||||
pages = Page.objects.filter(
|
||||
q,
|
||||
project__project_projectmember__member=self.request.user,
|
||||
workspace__slug=slug,
|
||||
)
|
||||
|
||||
def filter_views(self, query, slug, project_id):
|
||||
if workspace_search == "false" and project_id:
|
||||
pages = pages.filter(project_id=project_id)
|
||||
|
||||
return pages.distinct().values(
|
||||
"name",
|
||||
"id",
|
||||
"project_id",
|
||||
"project__identifier",
|
||||
"workspace__slug",
|
||||
)
|
||||
|
||||
def filter_views(self, query, slug, project_id, workspace_search):
|
||||
fields = ["name"]
|
||||
q = Q()
|
||||
for field in fields:
|
||||
q |= Q(**{f"{field}__icontains": query})
|
||||
return (
|
||||
IssueView.objects.filter(
|
||||
q,
|
||||
project__project_projectmember__member=self.request.user,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
.distinct()
|
||||
.values(
|
||||
"name",
|
||||
"id",
|
||||
"project_id",
|
||||
"workspace__slug",
|
||||
)
|
||||
|
||||
issue_views = IssueView.objects.filter(
|
||||
q,
|
||||
project__project_projectmember__member=self.request.user,
|
||||
workspace__slug=slug,
|
||||
)
|
||||
|
||||
def get(self, request, slug, project_id):
|
||||
if workspace_search == "false" and project_id:
|
||||
issue_views = issue_views.filter(project_id=project_id)
|
||||
|
||||
return issue_views.distinct().values(
|
||||
"name",
|
||||
"id",
|
||||
"project_id",
|
||||
"project__identifier",
|
||||
"workspace__slug",
|
||||
)
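Each filter_* helper above follows the same shape: OR together case-insensitive lookups over a short field list, scope the queryset to the workspace, and only narrow to a single project when workspace_search is "false" and a project_id is given. A framework-free sketch of that decision, using plain dicts and illustrative field names:

# Illustrative sketch only; the real helpers build Django Q objects instead.
def filter_records(records, query, fields, project_id=None, workspace_search="false"):
    def matches(rec):
        return any(query.lower() in str(rec.get(f, "")).lower() for f in fields)

    results = [r for r in records if matches(r)]
    if workspace_search == "false" and project_id:
        results = [r for r in results if r.get("project_id") == project_id]
    return results

views = [
    {"name": "Roadmap view", "project_id": 1},
    {"name": "Bug triage view", "project_id": 2},
]
print(filter_records(views, "view", ["name"], project_id=1))  # only project 1 survives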
|
||||
|
||||
def get(self, request, slug):
|
||||
try:
|
||||
query = request.query_params.get("search", False)
|
||||
workspace_search = request.query_params.get("workspace_search", "false")
|
||||
project_id = request.query_params.get("project_id", False)
|
||||
|
||||
if not query:
|
||||
return Response(
|
||||
{
|
||||
@@ -191,7 +203,7 @@ class GlobalSearchEndpoint(BaseAPIView):
|
||||
|
||||
for model in MODELS_MAPPER.keys():
|
||||
func = MODELS_MAPPER.get(model, None)
|
||||
results[model] = func(query, slug, project_id)
|
||||
results[model] = func(query, slug, project_id, workspace_search)
|
||||
return Response({"results": results}, status=status.HTTP_200_OK)
|
||||
|
||||
except Exception as e:
|
||||
@@ -206,6 +218,7 @@ class IssueSearchEndpoint(BaseAPIView):
|
||||
def get(self, request, slug, project_id):
|
||||
try:
|
||||
query = request.query_params.get("search", False)
|
||||
workspace_search = request.query_params.get("workspace_search", "false")
|
||||
parent = request.query_params.get("parent", "false")
|
||||
blocker_blocked_by = request.query_params.get("blocker_blocked_by", "false")
|
||||
cycle = request.query_params.get("cycle", "false")
|
||||
@@ -216,10 +229,12 @@ class IssueSearchEndpoint(BaseAPIView):
|
||||
|
||||
issues = Issue.issue_objects.filter(
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
project__project_projectmember__member=self.request.user,
|
||||
)
|
||||
|
||||
if workspace_search == "false":
|
||||
issues = issues.filter(project_id=project_id)
|
||||
|
||||
if query:
|
||||
issues = search_issues(query, issues)
|
||||
|
||||
@@ -251,12 +266,12 @@ class IssueSearchEndpoint(BaseAPIView):
|
||||
if module == "true":
|
||||
issues = issues.exclude(issue_module__isnull=False)
|
||||
|
||||
|
||||
return Response(
|
||||
issues.values(
|
||||
"name",
|
||||
"id",
|
||||
"sequence_id",
|
||||
"project__name",
|
||||
"project__identifier",
|
||||
"project_id",
|
||||
"workspace__slug",
|
||||
|
||||
@@ -37,7 +37,9 @@ class UserEndpoint(BaseViewSet):
|
||||
workspace_invites = WorkspaceMemberInvite.objects.filter(
|
||||
email=request.user.email
|
||||
).count()
|
||||
assigned_issues = Issue.issue_objects.filter(assignees__in=[request.user]).count()
|
||||
assigned_issues = Issue.issue_objects.filter(
|
||||
assignees__in=[request.user]
|
||||
).count()
|
||||
|
||||
serialized_data = UserSerializer(request.user).data
|
||||
serialized_data["workspace"] = {
|
||||
@@ -47,7 +49,9 @@ class UserEndpoint(BaseViewSet):
|
||||
"fallback_workspace_slug": workspace.slug,
|
||||
"invites": workspace_invites,
|
||||
}
|
||||
serialized_data.setdefault("issues", {})["assigned_issues"] = assigned_issues
|
||||
serialized_data.setdefault("issues", {})[
|
||||
"assigned_issues"
|
||||
] = assigned_issues
|
||||
|
||||
return Response(
|
||||
serialized_data,
|
||||
@@ -59,11 +63,15 @@ class UserEndpoint(BaseViewSet):
|
||||
workspace_invites = WorkspaceMemberInvite.objects.filter(
|
||||
email=request.user.email
|
||||
).count()
|
||||
assigned_issues = Issue.issue_objects.filter(assignees__in=[request.user]).count()
|
||||
assigned_issues = Issue.issue_objects.filter(
|
||||
assignees__in=[request.user]
|
||||
).count()
|
||||
|
||||
fallback_workspace = Workspace.objects.filter(
|
||||
workspace_member__member=request.user
|
||||
).order_by("created_at").first()
|
||||
fallback_workspace = (
|
||||
Workspace.objects.filter(workspace_member__member=request.user)
|
||||
.order_by("created_at")
|
||||
.first()
|
||||
)
|
||||
|
||||
serialized_data = UserSerializer(request.user).data
|
||||
|
||||
@@ -78,7 +86,9 @@ class UserEndpoint(BaseViewSet):
|
||||
else None,
|
||||
"invites": workspace_invites,
|
||||
}
|
||||
serialized_data.setdefault("issues", {})["assigned_issues"] = assigned_issues
|
||||
serialized_data.setdefault("issues", {})[
|
||||
"assigned_issues"
|
||||
] = assigned_issues
|
||||
|
||||
return Response(
|
||||
serialized_data,
|
||||
@@ -109,11 +119,28 @@ class UpdateUserOnBoardedEndpoint(BaseAPIView):
|
||||
)
|
||||
|
||||
|
||||
class UpdateUserTourCompletedEndpoint(BaseAPIView):
|
||||
def patch(self, request):
|
||||
try:
|
||||
user = User.objects.get(pk=request.user.id)
|
||||
user.is_tour_completed = request.data.get("is_tour_completed", False)
|
||||
user.save()
|
||||
return Response(
|
||||
{"message": "Updated successfully"}, status=status.HTTP_200_OK
|
||||
)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
|
||||
class UserActivityEndpoint(BaseAPIView, BasePaginator):
|
||||
def get(self, request):
|
||||
try:
|
||||
queryset = IssueActivity.objects.filter(actor=request.user).select_related(
|
||||
"actor", "workspace"
|
||||
"actor", "workspace", "issue", "project"
|
||||
)
|
||||
|
||||
return self.paginate(
|
||||
@@ -19,6 +19,7 @@ from plane.db.models import (
|
||||
IssueView,
|
||||
Issue,
|
||||
IssueViewFavorite,
|
||||
IssueReaction,
|
||||
)
|
||||
from plane.utils.issue_filters import issue_filters
|
||||
|
||||
@@ -77,6 +78,12 @@ class ViewIssuesEndpoint(BaseAPIView):
|
||||
.select_related("parent")
|
||||
.prefetch_related("assignees")
|
||||
.prefetch_related("labels")
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"issue_reactions",
|
||||
queryset=IssueReaction.objects.select_related("actor"),
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
serializer = IssueLiteSerializer(issues, many=True)
|
||||
|
||||
@@ -3,6 +3,7 @@ import jwt
|
||||
from datetime import date, datetime
|
||||
from dateutil.relativedelta import relativedelta
|
||||
from uuid import uuid4
|
||||
|
||||
# Django imports
|
||||
from django.db import IntegrityError
|
||||
from django.db.models import Prefetch
|
||||
@@ -12,15 +13,22 @@ from django.core.exceptions import ValidationError
|
||||
from django.core.validators import validate_email
|
||||
from django.contrib.sites.shortcuts import get_current_site
|
||||
from django.db.models import (
|
||||
CharField,
|
||||
Count,
|
||||
Prefetch,
|
||||
OuterRef,
|
||||
Func,
|
||||
F,
|
||||
Q,
|
||||
Count,
|
||||
Case,
|
||||
Value,
|
||||
CharField,
|
||||
When,
|
||||
Max,
|
||||
IntegerField,
|
||||
)
|
||||
from django.db.models.functions import ExtractWeek, Cast, ExtractDay
|
||||
from django.db.models.fields import DateField
|
||||
from django.contrib.auth.hashers import make_password
|
||||
|
||||
# Third party modules
|
||||
from rest_framework import status
|
||||
@@ -37,6 +45,9 @@ from plane.api.serializers import (
|
||||
UserLiteSerializer,
|
||||
ProjectMemberSerializer,
|
||||
WorkspaceThemeSerializer,
|
||||
IssueActivitySerializer,
|
||||
IssueLiteSerializer,
|
||||
WorkspaceMemberAdminSerializer,
|
||||
)
|
||||
from plane.api.views.base import BaseAPIView
|
||||
from . import BaseViewSet
|
||||
@@ -58,9 +69,24 @@ from plane.db.models import (
|
||||
PageFavorite,
|
||||
Page,
|
||||
IssueViewFavorite,
|
||||
IssueLink,
|
||||
IssueAttachment,
|
||||
IssueSubscriber,
|
||||
Project,
|
||||
Label,
|
||||
WorkspaceMember,
|
||||
CycleIssue,
|
||||
IssueReaction,
|
||||
)
|
||||
from plane.api.permissions import (
|
||||
WorkSpaceBasePermission,
|
||||
WorkSpaceAdminPermission,
|
||||
WorkspaceEntityPermission,
|
||||
WorkspaceViewerPermission,
|
||||
)
|
||||
from plane.api.permissions import WorkSpaceBasePermission, WorkSpaceAdminPermission
|
||||
from plane.bgtasks.workspace_invitation_task import workspace_invitation
|
||||
from plane.utils.issue_filters import issue_filters
|
||||
from plane.utils.grouper import group_results
|
||||
|
||||
|
||||
class WorkSpaceViewSet(BaseViewSet):
|
||||
@@ -81,7 +107,9 @@ class WorkSpaceViewSet(BaseViewSet):
|
||||
|
||||
def get_queryset(self):
|
||||
member_count = (
|
||||
WorkspaceMember.objects.filter(workspace=OuterRef("id"))
|
||||
WorkspaceMember.objects.filter(
|
||||
workspace=OuterRef("id"), member__is_bot=False
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
@@ -93,14 +121,34 @@ class WorkSpaceViewSet(BaseViewSet):
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
return self.filter_queryset(
|
||||
super().get_queryset().select_related("owner")
|
||||
).order_by("name").filter(workspace_member__member=self.request.user).annotate(total_members=member_count).annotate(total_issues=issue_count)
|
||||
return (
|
||||
self.filter_queryset(super().get_queryset().select_related("owner"))
|
||||
.order_by("name")
|
||||
.filter(workspace_member__member=self.request.user)
|
||||
.annotate(total_members=member_count)
|
||||
.annotate(total_issues=issue_count)
|
||||
.select_related("owner")
|
||||
)
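The member and issue totals above use a pattern that recurs throughout this diff: a correlated subquery collapsed to a single COUNT with Func(F("id"), function="Count"). A sketch of the pattern in isolation; it assumes it runs inside the Django project where these models are importable:

# Correlated count annotation, same shape as the get_queryset above.
from django.db.models import F, Func, OuterRef

from plane.db.models import Workspace, WorkspaceMember

member_count = (
    WorkspaceMember.objects.filter(workspace=OuterRef("id"), member__is_bot=False)
    .order_by()
    .annotate(count=Func(F("id"), function="Count"))
    .values("count")
)

# Every workspace row gets a total_members column computed by the subquery.
workspaces = Workspace.objects.annotate(total_members=member_count)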
|
||||
|
||||
def create(self, request):
|
||||
try:
|
||||
serializer = WorkSpaceSerializer(data=request.data)
|
||||
|
||||
slug = request.data.get("slug", False)
|
||||
name = request.data.get("name", False)
|
||||
|
||||
if not name or not slug:
|
||||
return Response(
|
||||
{"error": "Both name and slug are required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
if len(name) > 80 or len(slug) > 48:
|
||||
return Response(
|
||||
{"error": "The maximum length for name is 80 and for slug is 48"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
if serializer.is_valid():
|
||||
serializer.save(owner=request.user)
|
||||
# Create Workspace member
|
||||
@@ -146,7 +194,9 @@ class UserWorkSpacesEndpoint(BaseAPIView):
|
||||
def get(self, request):
|
||||
try:
|
||||
member_count = (
|
||||
WorkspaceMember.objects.filter(workspace=OuterRef("id"))
|
||||
WorkspaceMember.objects.filter(
|
||||
workspace=OuterRef("id"), member__is_bot=False
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
@@ -160,14 +210,20 @@ class UserWorkSpacesEndpoint(BaseAPIView):
|
||||
)
|
||||
|
||||
workspace = (
|
||||
Workspace.objects.prefetch_related(
|
||||
Prefetch("workspace_member", queryset=WorkspaceMember.objects.all())
|
||||
(
|
||||
Workspace.objects.prefetch_related(
|
||||
Prefetch(
|
||||
"workspace_member", queryset=WorkspaceMember.objects.all()
|
||||
)
|
||||
)
|
||||
.filter(
|
||||
workspace_member__member=request.user,
|
||||
)
|
||||
.select_related("owner")
|
||||
)
|
||||
.filter(
|
||||
workspace_member__member=request.user,
|
||||
)
|
||||
.select_related("owner")
|
||||
).annotate(total_members=member_count).annotate(total_issues=issue_count)
|
||||
.annotate(total_members=member_count)
|
||||
.annotate(total_issues=issue_count)
|
||||
)
|
||||
|
||||
serializer = WorkSpaceSerializer(self.filter_queryset(workspace), many=True)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
@@ -216,9 +272,20 @@ class InviteWorkspaceEndpoint(BaseAPIView):
|
||||
)
|
||||
|
||||
# check for role level
|
||||
requesting_user = WorkspaceMember.objects.get(workspace__slug=slug, member=request.user)
|
||||
if len([email for email in emails if int(email.get("role", 10)) > requesting_user.role]):
|
||||
return Response({"error": "You cannot invite a user with higher role"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
requesting_user = WorkspaceMember.objects.get(
|
||||
workspace__slug=slug, member=request.user
|
||||
)
|
||||
if len(
|
||||
[
|
||||
email
|
||||
for email in emails
|
||||
if int(email.get("role", 10)) > requesting_user.role
|
||||
]
|
||||
):
|
||||
return Response(
|
||||
{"error": "You cannot invite a user with higher role"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
workspace = Workspace.objects.get(slug=slug)
|
||||
|
||||
@@ -276,14 +343,18 @@ class InviteWorkspaceEndpoint(BaseAPIView):
|
||||
|
||||
# create the user if signup is disabled
|
||||
if settings.DOCKERIZED and not settings.ENABLE_SIGNUP:
|
||||
_ = User.objects.bulk_create([
|
||||
User(
|
||||
email=email.get("email"),
|
||||
password=str(uuid4().hex),
|
||||
is_password_autoset=True
|
||||
)
|
||||
for email in emails
|
||||
], batch_size=100)
|
||||
_ = User.objects.bulk_create(
|
||||
[
|
||||
User(
|
||||
username=str(uuid4().hex),
|
||||
email=invitation.email,
|
||||
password=make_password(uuid4().hex),
|
||||
is_password_autoset=True,
|
||||
)
|
||||
for invitation in workspace_invitations
|
||||
],
|
||||
batch_size=100,
|
||||
)
|
||||
|
||||
for invitation in workspace_invitations:
|
||||
workspace_invitation.delay(
|
||||
@@ -400,6 +471,30 @@ class WorkspaceInvitationsViewset(BaseViewSet):
|
||||
.select_related("workspace", "workspace__owner", "created_by")
|
||||
)
|
||||
|
||||
def destroy(self, request, slug, pk):
|
||||
try:
|
||||
workspace_member_invite = WorkspaceMemberInvite.objects.get(
|
||||
pk=pk, workspace__slug=slug
|
||||
)
|
||||
# delete the user if signup is disabled
|
||||
if settings.DOCKERIZED and not settings.ENABLE_SIGNUP:
|
||||
user = User.objects.filter(email=workspace_member_invite.email).first()
|
||||
if user is not None:
|
||||
user.delete()
|
||||
workspace_member_invite.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
except WorkspaceMemberInvite.DoesNotExist:
return Response(
{"error": "Workspace member invite does not exist"},
status=status.HTTP_400_BAD_REQUEST,
)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
|
||||
class UserWorkspaceInvitationsEndpoint(BaseViewSet):
|
||||
serializer_class = WorkSpaceMemberInviteSerializer
|
||||
@@ -447,7 +542,7 @@ class UserWorkspaceInvitationsEndpoint(BaseViewSet):
|
||||
|
||||
|
||||
class WorkSpaceMemberViewSet(BaseViewSet):
|
||||
serializer_class = WorkSpaceMemberSerializer
|
||||
serializer_class = WorkspaceMemberAdminSerializer
|
||||
model = WorkspaceMember
|
||||
|
||||
permission_classes = [
|
||||
@@ -455,7 +550,7 @@ class WorkSpaceMemberViewSet(BaseViewSet):
|
||||
]
|
||||
|
||||
search_fields = [
|
||||
"member__email",
|
||||
"member__display_name",
|
||||
"member__first_name",
|
||||
]
|
||||
|
||||
@@ -530,6 +625,21 @@ class WorkSpaceMemberViewSet(BaseViewSet):
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Check for the only member in the workspace
|
||||
if (
|
||||
workspace_member.role == 20
|
||||
and WorkspaceMember.objects.filter(
|
||||
workspace__slug=slug,
|
||||
role=20,
|
||||
member__is_bot=False,
|
||||
).count()
|
||||
== 1
|
||||
):
|
||||
return Response(
|
||||
{"error": "Cannot delete the only Admin for the workspace"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Delete the user also from all the projects
|
||||
ProjectMember.objects.filter(
|
||||
workspace__slug=slug, member=workspace_member.member
|
||||
@@ -587,7 +697,7 @@ class TeamMemberViewSet(BaseViewSet):
|
||||
]
|
||||
|
||||
search_fields = [
|
||||
"member__email",
|
||||
"member__display_name",
|
||||
"member__first_name",
|
||||
]
|
||||
|
||||
@@ -865,7 +975,9 @@ class UserWorkspaceDashboardEndpoint(BaseAPIView):
|
||||
)
|
||||
|
||||
state_distribution = (
|
||||
Issue.issue_objects.filter(workspace__slug=slug, assignees__in=[request.user])
|
||||
Issue.issue_objects.filter(
|
||||
workspace__slug=slug, assignees__in=[request.user]
|
||||
)
|
||||
.annotate(state_group=F("state__group"))
|
||||
.values("state_group")
|
||||
.annotate(state_count=Count("state_group"))
|
||||
@@ -882,11 +994,11 @@ class UserWorkspaceDashboardEndpoint(BaseAPIView):
|
||||
|
||||
upcoming_issues = Issue.issue_objects.filter(
|
||||
~Q(state__group__in=["completed", "cancelled"]),
|
||||
target_date__gte=timezone.now(),
|
||||
start_date__gte=timezone.now(),
|
||||
workspace__slug=slug,
|
||||
assignees__in=[request.user],
|
||||
completed_at__isnull=True,
|
||||
).values("id", "name", "workspace__slug", "project_id", "target_date")
|
||||
).values("id", "name", "workspace__slug", "project_id", "start_date")
|
||||
|
||||
return Response(
|
||||
{
|
||||
@@ -940,3 +1052,424 @@ class WorkspaceThemeViewSet(BaseViewSet):
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
|
||||
class WorkspaceUserProfileStatsEndpoint(BaseAPIView):
|
||||
def get(self, request, slug, user_id):
|
||||
try:
|
||||
filters = issue_filters(request.query_params, "GET")
|
||||
|
||||
state_distribution = (
|
||||
Issue.issue_objects.filter(
|
||||
workspace__slug=slug,
|
||||
assignees__in=[user_id],
|
||||
project__project_projectmember__member=request.user,
|
||||
)
|
||||
.filter(**filters)
|
||||
.annotate(state_group=F("state__group"))
|
||||
.values("state_group")
|
||||
.annotate(state_count=Count("state_group"))
|
||||
.order_by("state_group")
|
||||
)
|
||||
|
||||
priority_order = ["urgent", "high", "medium", "low", None]
|
||||
|
||||
priority_distribution = (
|
||||
Issue.objects.filter(
|
||||
workspace__slug=slug,
|
||||
assignees__in=[user_id],
|
||||
project__project_projectmember__member=request.user,
|
||||
)
|
||||
.filter(**filters)
|
||||
.values("priority")
|
||||
.annotate(priority_count=Count("priority"))
|
||||
.filter(priority_count__gte=1)
|
||||
.annotate(
|
||||
priority_order=Case(
|
||||
*[
|
||||
When(priority=p, then=Value(i))
|
||||
for i, p in enumerate(priority_order)
|
||||
],
|
||||
default=Value(len(priority_order)),
|
||||
output_field=IntegerField(),
|
||||
)
|
||||
)
|
||||
.order_by("priority_order")
|
||||
)
|
||||
|
||||
created_issues = (
|
||||
Issue.issue_objects.filter(
|
||||
workspace__slug=slug,
|
||||
assignees__in=[user_id],
|
||||
project__project_projectmember__member=request.user,
|
||||
created_by_id=user_id,
|
||||
)
|
||||
.filter(**filters)
|
||||
.count()
|
||||
)
|
||||
|
||||
assigned_issues_count = (
|
||||
Issue.issue_objects.filter(
|
||||
workspace__slug=slug,
|
||||
assignees__in=[user_id],
|
||||
project__project_projectmember__member=request.user,
|
||||
)
|
||||
.filter(**filters)
|
||||
.count()
|
||||
)
|
||||
|
||||
pending_issues_count = (
|
||||
Issue.issue_objects.filter(
|
||||
~Q(state__group__in=["completed", "cancelled"]),
|
||||
workspace__slug=slug,
|
||||
assignees__in=[user_id],
|
||||
project__project_projectmember__member=request.user,
|
||||
)
|
||||
.filter(**filters)
|
||||
.count()
|
||||
)
|
||||
|
||||
completed_issues_count = (
|
||||
Issue.issue_objects.filter(
|
||||
workspace__slug=slug,
|
||||
assignees__in=[user_id],
|
||||
state__group="completed",
|
||||
project__project_projectmember__member=request.user,
|
||||
)
|
||||
.filter(**filters)
|
||||
.count()
|
||||
)
|
||||
|
||||
subscribed_issues_count = (
|
||||
IssueSubscriber.objects.filter(
|
||||
workspace__slug=slug,
|
||||
subscriber_id=user_id,
|
||||
project__project_projectmember__member=request.user,
|
||||
)
|
||||
.filter(**filters)
|
||||
.count()
|
||||
)
|
||||
|
||||
upcoming_cycles = CycleIssue.objects.filter(
|
||||
workspace__slug=slug,
|
||||
cycle__start_date__gt=timezone.now().date(),
|
||||
issue__assignees__in=[
|
||||
user_id,
|
||||
],
|
||||
).values("cycle__name", "cycle__id", "cycle__project_id")
|
||||
|
||||
present_cycle = CycleIssue.objects.filter(
|
||||
workspace__slug=slug,
|
||||
cycle__start_date__lt=timezone.now().date(),
|
||||
cycle__end_date__gt=timezone.now().date(),
|
||||
issue__assignees__in=[
|
||||
user_id,
|
||||
],
|
||||
).values("cycle__name", "cycle__id", "cycle__project_id")
|
||||
|
||||
return Response(
|
||||
{
|
||||
"state_distribution": state_distribution,
|
||||
"priority_distribution": priority_distribution,
|
||||
"created_issues": created_issues,
|
||||
"assigned_issues": assigned_issues_count,
|
||||
"completed_issues": completed_issues_count,
|
||||
"pending_issues": pending_issues_count,
|
||||
"subscribed_issues": subscribed_issues_count,
|
||||
"present_cycles": present_cycle,
|
||||
"upcoming_cycles": upcoming_cycles,
|
||||
}
|
||||
)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
|
||||
class WorkspaceUserActivityEndpoint(BaseAPIView):
|
||||
permission_classes = [
|
||||
WorkspaceEntityPermission,
|
||||
]
|
||||
|
||||
def get(self, request, slug, user_id):
|
||||
try:
|
||||
projects = request.query_params.getlist("project", [])
|
||||
|
||||
queryset = IssueActivity.objects.filter(
|
||||
workspace__slug=slug,
|
||||
project__project_projectmember__member=request.user,
|
||||
actor=user_id,
|
||||
).select_related("actor", "workspace", "issue", "project")
|
||||
|
||||
if projects:
|
||||
queryset = queryset.filter(project__in=projects)
|
||||
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=queryset,
|
||||
on_results=lambda issue_activities: IssueActivitySerializer(
|
||||
issue_activities, many=True
|
||||
).data,
|
||||
)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
|
||||
class WorkspaceUserProfileEndpoint(BaseAPIView):
|
||||
def get(self, request, slug, user_id):
|
||||
try:
|
||||
user_data = User.objects.get(pk=user_id)
|
||||
|
||||
requesting_workspace_member = WorkspaceMember.objects.get(
|
||||
workspace__slug=slug, member=request.user
|
||||
)
|
||||
projects = []
|
||||
if requesting_workspace_member.role >= 10:
|
||||
projects = (
|
||||
Project.objects.filter(
|
||||
workspace__slug=slug,
|
||||
project_projectmember__member=request.user,
|
||||
)
|
||||
.annotate(
|
||||
created_issues=Count(
|
||||
"project_issue",
|
||||
filter=Q(project_issue__created_by_id=user_id),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
assigned_issues=Count(
|
||||
"project_issue",
|
||||
filter=Q(project_issue__assignees__in=[user_id]),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
completed_issues=Count(
|
||||
"project_issue",
|
||||
filter=Q(
|
||||
project_issue__completed_at__isnull=False,
|
||||
project_issue__assignees__in=[user_id],
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
pending_issues=Count(
|
||||
"project_issue",
|
||||
filter=Q(
|
||||
project_issue__state__group__in=[
|
||||
"backlog",
|
||||
"unstarted",
|
||||
"started",
|
||||
],
|
||||
project_issue__assignees__in=[user_id],
|
||||
),
|
||||
)
|
||||
)
|
||||
.values(
|
||||
"id",
|
||||
"name",
|
||||
"identifier",
|
||||
"emoji",
|
||||
"icon_prop",
|
||||
"created_issues",
|
||||
"assigned_issues",
|
||||
"completed_issues",
|
||||
"pending_issues",
|
||||
)
|
||||
)
|
||||
|
||||
return Response(
|
||||
{
|
||||
"project_data": projects,
|
||||
"user_data": {
|
||||
"email": user_data.email,
|
||||
"first_name": user_data.first_name,
|
||||
"last_name": user_data.last_name,
|
||||
"avatar": user_data.avatar,
|
||||
"cover_image": user_data.cover_image,
|
||||
"date_joined": user_data.date_joined,
|
||||
"user_timezone": user_data.user_timezone,
|
||||
"display_name": user_data.display_name,
|
||||
},
|
||||
},
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
except WorkspaceMember.DoesNotExist:
|
||||
return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
|
||||
class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
|
||||
permission_classes = [
|
||||
WorkspaceViewerPermission,
|
||||
]
|
||||
|
||||
def get(self, request, slug, user_id):
|
||||
try:
|
||||
filters = issue_filters(request.query_params, "GET")
|
||||
order_by_param = request.GET.get("order_by", "-created_at")
|
||||
issue_queryset = (
|
||||
Issue.issue_objects.filter(
|
||||
Q(assignees__in=[user_id])
|
||||
| Q(created_by_id=user_id)
|
||||
| Q(issue_subscribers__subscriber_id=user_id),
|
||||
workspace__slug=slug,
|
||||
project__project_projectmember__member=request.user,
|
||||
)
|
||||
.filter(**filters)
|
||||
.annotate(
|
||||
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.select_related("project", "workspace", "state", "parent")
|
||||
.prefetch_related("assignees", "labels")
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"issue_reactions",
|
||||
queryset=IssueReaction.objects.select_related("actor"),
|
||||
)
|
||||
)
|
||||
.order_by("-created_at")
|
||||
.annotate(
|
||||
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
attachment_count=IssueAttachment.objects.filter(
|
||||
issue=OuterRef("id")
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
).distinct()
|
||||
|
||||
# Priority Ordering
|
||||
if order_by_param == "priority" or order_by_param == "-priority":
|
||||
priority_order = (
|
||||
priority_order
|
||||
if order_by_param == "priority"
|
||||
else priority_order[::-1]
|
||||
)
|
||||
issue_queryset = issue_queryset.annotate(
|
||||
priority_order=Case(
|
||||
*[
|
||||
When(priority=p, then=Value(i))
|
||||
for i, p in enumerate(priority_order)
|
||||
],
|
||||
output_field=CharField(),
|
||||
)
|
||||
).order_by("priority_order")
|
||||
|
||||
# State Ordering
|
||||
elif order_by_param in [
|
||||
"state__name",
|
||||
"state__group",
|
||||
"-state__name",
|
||||
"-state__group",
|
||||
]:
|
||||
state_order = (
|
||||
state_order
|
||||
if order_by_param in ["state__name", "state__group"]
|
||||
else state_order[::-1]
|
||||
)
|
||||
issue_queryset = issue_queryset.annotate(
|
||||
state_order=Case(
|
||||
*[
|
||||
When(state__group=state_group, then=Value(i))
|
||||
for i, state_group in enumerate(state_order)
|
||||
],
|
||||
default=Value(len(state_order)),
|
||||
output_field=CharField(),
|
||||
)
|
||||
).order_by("state_order")
|
||||
# assignee and label ordering
|
||||
elif order_by_param in [
|
||||
"labels__name",
|
||||
"-labels__name",
|
||||
"assignees__first_name",
|
||||
"-assignees__first_name",
|
||||
]:
|
||||
issue_queryset = issue_queryset.annotate(
|
||||
max_values=Max(
|
||||
order_by_param[1::]
|
||||
if order_by_param.startswith("-")
|
||||
else order_by_param
|
||||
)
|
||||
).order_by(
|
||||
"-max_values" if order_by_param.startswith("-") else "max_values"
|
||||
)
|
||||
else:
|
||||
issue_queryset = issue_queryset.order_by(order_by_param)
|
||||
|
||||
issues = IssueLiteSerializer(issue_queryset, many=True).data
|
||||
|
||||
## Grouping the results
|
||||
group_by = request.GET.get("group_by", False)
|
||||
if group_by:
|
||||
return Response(
|
||||
group_results(issues, group_by), status=status.HTTP_200_OK
|
||||
)
|
||||
|
||||
return Response(issues, status=status.HTTP_200_OK)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
|
||||
class WorkspaceLabelsEndpoint(BaseAPIView):
|
||||
permission_classes = [
|
||||
WorkspaceViewerPermission,
|
||||
]
|
||||
|
||||
def get(self, request, slug):
|
||||
try:
|
||||
labels = Label.objects.filter(
|
||||
workspace__slug=slug,
|
||||
project__project_projectmember__member=request.user,
|
||||
).values("parent", "name", "color", "id", "project_id", "workspace__slug")
|
||||
return Response(labels, status=status.HTTP_200_OK)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
|
||||
class WorkspaceMembersEndpoint(BaseAPIView):
|
||||
permission_classes = [
|
||||
WorkspaceEntityPermission,
|
||||
]
|
||||
|
||||
def get(self, request, slug):
|
||||
try:
|
||||
workspace_members = WorkspaceMember.objects.filter(
|
||||
workspace__slug=slug,
|
||||
member__is_bot=False,
|
||||
).select_related("workspace", "member")
|
||||
serializer = WorkSpaceMemberSerializer(workspace_members, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return Response(
|
||||
{"error": "Something went wrong please try again later"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
@@ -21,7 +21,7 @@ row_mapping = {
|
||||
"state__name": "State",
|
||||
"state__group": "State Group",
|
||||
"labels__name": "Label",
|
||||
"assignees__email": "Assignee Name",
|
||||
"assignees__display_name": "Assignee Name",
|
||||
"start_date": "Start Date",
|
||||
"target_date": "Due Date",
|
||||
"completed_at": "Completed At",
|
||||
@@ -51,12 +51,12 @@ def analytic_export_task(email, data, slug):
|
||||
segmented = segment
|
||||
|
||||
assignee_details = {}
|
||||
if x_axis in ["assignees__email"] or segment in ["assignees__email"]:
|
||||
if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
|
||||
assignee_details = (
|
||||
Issue.issue_objects.filter(workspace__slug=slug, **filters, assignees__avatar__isnull=False)
|
||||
.order_by("assignees__id")
|
||||
.distinct("assignees__id")
|
||||
.values("assignees__avatar", "assignees__email", "assignees__first_name", "assignees__last_name")
|
||||
.values("assignees__avatar", "assignees__display_name", "assignees__first_name", "assignees__last_name", "assignees__id")
|
||||
)
|
||||
|
||||
if segment:
|
||||
@@ -93,19 +93,19 @@ def analytic_export_task(email, data, slug):
|
||||
else:
|
||||
generated_row.append("0")
|
||||
# x-axis replacement for names
|
||||
if x_axis in ["assignees__email"]:
|
||||
assignee = [user for user in assignee_details if str(user.get("assignees__email")) == str(item)]
|
||||
if x_axis in ["assignees__id"]:
|
||||
assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(item)]
|
||||
if len(assignee):
|
||||
generated_row[0] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name"))
|
||||
rows.append(tuple(generated_row))
|
||||
|
||||
# If segment is ["assignees__email"] then replace segment_zero rows with first and last names
|
||||
if segmented in ["assignees__email"]:
|
||||
# If segment is ["assignees__display_name"] then replace segment_zero rows with first and last names
|
||||
if segmented in ["assignees__id"]:
|
||||
for index, segm in enumerate(row_zero[2:]):
|
||||
# find the name of the user
|
||||
assignee = [user for user in assignee_details if str(user.get("assignees__email")) == str(segm)]
|
||||
assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(segm)]
|
||||
if len(assignee):
|
||||
row_zero[index] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name"))
|
||||
row_zero[index + 2] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name"))
|
||||
|
||||
rows = [tuple(row_zero)] + rows
|
||||
csv_buffer = io.StringIO()
|
||||
@@ -141,8 +141,8 @@ def analytic_export_task(email, data, slug):
|
||||
else distribution.get(item)[0].get("estimate "),
|
||||
]
|
||||
# x-axis replacement to names
|
||||
if x_axis in ["assignees__email"]:
|
||||
assignee = [user for user in assignee_details if str(user.get("assignees__email")) == str(item)]
|
||||
if x_axis in ["assignees__id"]:
|
||||
assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(item)]
|
||||
if len(assignee):
|
||||
row[0] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name"))
|
||||
|
||||
|
||||
apiserver/plane/bgtasks/export_task.py (new file, 346 lines)
@@ -0,0 +1,346 @@
|
||||
# Python imports
import csv
import io
import json
import boto3
import zipfile

# Django imports
from django.conf import settings
from django.utils import timezone

# Third party imports
from celery import shared_task
from sentry_sdk import capture_exception
from botocore.client import Config
from openpyxl import Workbook

# Module imports
from plane.db.models import Issue, ExporterHistory
|
||||
|
||||
|
||||
def dateTimeConverter(time):
|
||||
if time:
|
||||
return time.strftime("%a, %d %b %Y %I:%M:%S %Z%z")
|
||||
|
||||
def dateConverter(time):
|
||||
if time:
|
||||
return time.strftime("%a, %d %b %Y")
|
||||
|
||||
def create_csv_file(data):
|
||||
csv_buffer = io.StringIO()
|
||||
csv_writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)
|
||||
|
||||
for row in data:
|
||||
csv_writer.writerow(row)
|
||||
|
||||
csv_buffer.seek(0)
|
||||
return csv_buffer.getvalue()
|
||||
|
||||
|
||||
def create_json_file(data):
|
||||
return json.dumps(data)
|
||||
|
||||
|
||||
def create_xlsx_file(data):
|
||||
workbook = Workbook()
|
||||
sheet = workbook.active
|
||||
|
||||
for row in data:
|
||||
sheet.append(row)
|
||||
|
||||
xlsx_buffer = io.BytesIO()
|
||||
workbook.save(xlsx_buffer)
|
||||
xlsx_buffer.seek(0)
|
||||
return xlsx_buffer.getvalue()
|
||||
|
||||
|
||||
def create_zip_file(files):
|
||||
zip_buffer = io.BytesIO()
|
||||
with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zipf:
|
||||
for filename, file_content in files:
|
||||
zipf.writestr(filename, file_content)
|
||||
|
||||
zip_buffer.seek(0)
|
||||
return zip_buffer
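Taken together, the helpers above produce one file per project and bundle them into a single archive that upload_to_s3 then ships off. A small usage sketch with toy rows (the file name and data are made up):

# Illustrative only: exercise create_csv_file and create_zip_file with toy data.
rows = [
    ("ID", "Name", "State"),
    ("PROJ-1", "Fix login", "started"),
]
files = [("demo-project.csv", create_csv_file(rows))]
archive = create_zip_file(files)  # io.BytesIO, ready to hand to upload_to_s3
with open("export.zip", "wb") as f:
    f.write(archive.getvalue())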
|
||||
|
||||
|
||||
def upload_to_s3(zip_file, workspace_id, token_id, slug):
|
||||
s3 = boto3.client(
|
||||
"s3",
|
||||
region_name=settings.AWS_REGION,
|
||||
aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
|
||||
aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
|
||||
config=Config(signature_version="s3v4"),
|
||||
)
|
||||
file_name = f"{workspace_id}/export-{slug}-{token_id[:6]}-{timezone.now()}.zip"
|
||||
|
||||
s3.upload_fileobj(
|
||||
zip_file,
|
||||
settings.AWS_S3_BUCKET_NAME,
|
||||
file_name,
|
||||
ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"},
|
||||
)
|
||||
|
||||
expires_in = 7 * 24 * 60 * 60
|
||||
presigned_url = s3.generate_presigned_url(
|
||||
"get_object",
|
||||
Params={"Bucket": settings.AWS_S3_BUCKET_NAME, "Key": file_name},
|
||||
ExpiresIn=expires_in,
|
||||
)
|
||||
|
||||
exporter_instance = ExporterHistory.objects.get(token=token_id)
|
||||
|
||||
if presigned_url:
|
||||
exporter_instance.url = presigned_url
|
||||
exporter_instance.status = "completed"
|
||||
exporter_instance.key = file_name
|
||||
else:
|
||||
exporter_instance.status = "failed"
|
||||
|
||||
exporter_instance.save(update_fields=["status", "url", "key"])
|
||||
|
||||
|
||||
def generate_table_row(issue):
|
||||
return [
|
||||
f"""{issue["project__identifier"]}-{issue["sequence_id"]}""",
|
||||
issue["project__name"],
|
||||
issue["name"],
|
||||
issue["description_stripped"],
|
||||
issue["state__name"],
|
||||
issue["priority"],
|
||||
f"{issue['created_by__first_name']} {issue['created_by__last_name']}"
|
||||
if issue["created_by__first_name"] and issue["created_by__last_name"]
|
||||
else "",
|
||||
f"{issue['assignees__first_name']} {issue['assignees__last_name']}"
|
||||
if issue["assignees__first_name"] and issue["assignees__last_name"]
|
||||
else "",
|
||||
issue["labels__name"],
|
||||
issue["issue_cycle__cycle__name"],
|
||||
dateConverter(issue["issue_cycle__cycle__start_date"]),
|
||||
dateConverter(issue["issue_cycle__cycle__end_date"]),
|
||||
issue["issue_module__module__name"],
|
||||
dateConverter(issue["issue_module__module__start_date"]),
|
||||
dateConverter(issue["issue_module__module__target_date"]),
|
||||
dateTimeConverter(issue["created_at"]),
|
||||
dateTimeConverter(issue["updated_at"]),
|
||||
dateTimeConverter(issue["completed_at"]),
|
||||
dateTimeConverter(issue["archived_at"]),
|
||||
]
|
||||
|
||||
|
||||
def generate_json_row(issue):
|
||||
return {
|
||||
"ID": f"""{issue["project__identifier"]}-{issue["sequence_id"]}""",
|
||||
"Project": issue["project__name"],
|
||||
"Name": issue["name"],
|
||||
"Description": issue["description_stripped"],
|
||||
"State": issue["state__name"],
|
||||
"Priority": issue["priority"],
|
||||
"Created By": f"{issue['created_by__first_name']} {issue['created_by__last_name']}"
|
||||
if issue["created_by__first_name"] and issue["created_by__last_name"]
|
||||
else "",
|
||||
"Assignee": f"{issue['assignees__first_name']} {issue['assignees__last_name']}"
|
||||
if issue["assignees__first_name"] and issue["assignees__last_name"]
|
||||
else "",
|
||||
"Labels": issue["labels__name"],
|
||||
"Cycle Name": issue["issue_cycle__cycle__name"],
|
||||
"Cycle Start Date": dateConverter(issue["issue_cycle__cycle__start_date"]),
|
||||
"Cycle End Date": dateConverter(issue["issue_cycle__cycle__end_date"]),
|
||||
"Module Name": issue["issue_module__module__name"],
|
||||
"Module Start Date": dateConverter(issue["issue_module__module__start_date"]),
|
||||
"Module Target Date": dateConverter(issue["issue_module__module__target_date"]),
|
||||
"Created At": dateTimeConverter(issue["created_at"]),
|
||||
"Updated At": dateTimeConverter(issue["updated_at"]),
|
||||
"Completed At": dateTimeConverter(issue["completed_at"]),
|
||||
"Archived At": dateTimeConverter(issue["archived_at"]),
|
||||
}
|
||||
|
||||
|
||||
def update_json_row(rows, row):
|
||||
matched_index = next(
|
||||
(
|
||||
index
|
||||
for index, existing_row in enumerate(rows)
|
||||
if existing_row["ID"] == row["ID"]
|
||||
),
|
||||
None,
|
||||
)
|
||||
|
||||
if matched_index is not None:
|
||||
existing_assignees, existing_labels = (
|
||||
rows[matched_index]["Assignee"],
|
||||
rows[matched_index]["Labels"],
|
||||
)
|
||||
assignee, label = row["Assignee"], row["Labels"]
|
||||
|
||||
if assignee is not None and assignee not in existing_assignees:
|
||||
rows[matched_index]["Assignee"] += f", {assignee}"
|
||||
if label is not None and label not in existing_labels:
|
||||
rows[matched_index]["Labels"] += f", {label}"
|
||||
else:
|
||||
rows.append(row)
|
||||
|
||||
|
||||
def update_table_row(rows, row):
|
||||
matched_index = next(
|
||||
(index for index, existing_row in enumerate(rows) if existing_row[0] == row[0]),
|
||||
None,
|
||||
)
|
||||
|
||||
if matched_index is not None:
|
||||
existing_assignees, existing_labels = rows[matched_index][7:9]
|
||||
assignee, label = row[7:9]
|
||||
|
||||
if assignee is not None and assignee not in existing_assignees:
|
||||
rows[matched_index][7] += f", {assignee}"
|
||||
if label is not None and label not in existing_labels:
|
||||
rows[matched_index][8] += f", {label}"
|
||||
else:
|
||||
rows.append(row)
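update_table_row exists because the .values() query used later fans out one row per assignee/label combination for the same issue; duplicates are folded back by appending to the Assignee (index 7) and Labels (index 8) columns. A small worked example (values are made up and trimmed to nine columns so the indexes line up):

# Two flattened rows for the same issue, one per assignee, collapse into one.
row_a = ["PROJ-1", "Plane", "Fix login", "", "Started", "high", "Carol", "Alice", "Bug"]
row_b = ["PROJ-1", "Plane", "Fix login", "", "Started", "high", "Carol", "Bob", "Bug"]

rows = []
update_table_row(rows, row_a)
update_table_row(rows, row_b)
print(rows[0][7])  # "Alice, Bob": both assignees kept on a single row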
|
||||
|
||||
|
||||
def generate_csv(header, project_id, issues, files):
|
||||
"""
|
||||
Generate CSV export for all the passed issues.
|
||||
"""
|
||||
rows = [
|
||||
header,
|
||||
]
|
||||
for issue in issues:
|
||||
row = generate_table_row(issue)
|
||||
update_table_row(rows, row)
|
||||
csv_file = create_csv_file(rows)
|
||||
files.append((f"{project_id}.csv", csv_file))
|
||||
|
||||
|
||||
def generate_json(header, project_id, issues, files):
|
||||
rows = []
|
||||
for issue in issues:
|
||||
row = generate_json_row(issue)
|
||||
update_json_row(rows, row)
|
||||
json_file = create_json_file(rows)
|
||||
files.append((f"{project_id}.json", json_file))
|
||||
|
||||
|
||||
def generate_xlsx(header, project_id, issues, files):
|
||||
rows = [header]
|
||||
for issue in issues:
|
||||
row = generate_table_row(issue)
|
||||
update_table_row(rows, row)
|
||||
xlsx_file = create_xlsx_file(rows)
|
||||
files.append((f"{project_id}.xlsx", xlsx_file))
|
||||
|
||||
|
||||
@shared_task
|
||||
def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, slug):
|
||||
try:
|
||||
exporter_instance = ExporterHistory.objects.get(token=token_id)
|
||||
exporter_instance.status = "processing"
|
||||
exporter_instance.save(update_fields=["status"])
|
||||
|
||||
workspace_issues = (
|
||||
(
|
||||
Issue.objects.filter(
|
||||
workspace__id=workspace_id, project_id__in=project_ids
|
||||
)
|
||||
.select_related("project", "workspace", "state", "parent", "created_by")
|
||||
.prefetch_related(
|
||||
"assignees", "labels", "issue_cycle__cycle", "issue_module__module"
|
||||
)
|
||||
.values(
|
||||
"id",
|
||||
"project__identifier",
|
||||
"project__name",
|
||||
"project__id",
|
||||
"sequence_id",
|
||||
"name",
|
||||
"description_stripped",
|
||||
"priority",
|
||||
"state__name",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"completed_at",
|
||||
"archived_at",
|
||||
"issue_cycle__cycle__name",
|
||||
"issue_cycle__cycle__start_date",
|
||||
"issue_cycle__cycle__end_date",
|
||||
"issue_module__module__name",
|
||||
"issue_module__module__start_date",
|
||||
"issue_module__module__target_date",
|
||||
"created_by__first_name",
|
||||
"created_by__last_name",
|
||||
"assignees__first_name",
|
||||
"assignees__last_name",
|
||||
"labels__name",
|
||||
)
|
||||
)
|
||||
.order_by("project__identifier","sequence_id")
|
||||
.distinct()
|
||||
)
|
||||
# CSV header
|
||||
header = [
|
||||
"ID",
|
||||
"Project",
|
||||
"Name",
|
||||
"Description",
|
||||
"State",
|
||||
"Priority",
|
||||
"Created By",
|
||||
"Assignee",
|
||||
"Labels",
|
||||
"Cycle Name",
|
||||
"Cycle Start Date",
|
||||
"Cycle End Date",
|
||||
"Module Name",
|
||||
"Module Start Date",
|
||||
"Module Target Date",
|
||||
"Created At",
|
||||
"Updated At",
|
||||
"Completed At",
|
||||
"Archived At",
|
||||
]
|
||||
|
||||
EXPORTER_MAPPER = {
|
||||
"csv": generate_csv,
|
||||
"json": generate_json,
|
||||
"xlsx": generate_xlsx,
|
||||
}
|
||||
|
||||
files = []
|
||||
if multiple:
|
||||
for project_id in project_ids:
|
||||
issues = workspace_issues.filter(project__id=project_id)
|
||||
exporter = EXPORTER_MAPPER.get(provider)
|
||||
if exporter is not None:
|
||||
exporter(
|
||||
header,
|
||||
project_id,
|
||||
issues,
|
||||
files,
|
||||
)
|
||||
|
||||
else:
|
||||
exporter = EXPORTER_MAPPER.get(provider)
|
||||
if exporter is not None:
|
||||
exporter(
|
||||
header,
|
||||
workspace_id,
|
||||
workspace_issues,
|
||||
files,
|
||||
)
|
||||
|
||||
zip_buffer = create_zip_file(files)
|
||||
upload_to_s3(zip_buffer, workspace_id, token_id, slug)
|
||||
|
||||
except Exception as e:
|
||||
exporter_instance = ExporterHistory.objects.get(token=token_id)
|
||||
exporter_instance.status = "failed"
|
||||
exporter_instance.reason = str(e)
|
||||
exporter_instance.save(update_fields=["status", "reason"])
|
||||
|
||||
# Print logs if in DEBUG mode
|
||||
if settings.DEBUG:
|
||||
print(e)
|
||||
capture_exception(e)
|
||||
return
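issue_export_task is a Celery shared_task, so callers enqueue it rather than calling it inline; the keyword arguments mirror the signature above, and the concrete IDs below are placeholders, not values from this diff:

# Hedged example: queue an export of two projects as a zipped CSV bundle.
issue_export_task.delay(
    provider="csv",
    workspace_id="<workspace-uuid>",
    project_ids=["<project-uuid-1>", "<project-uuid-2>"],
    token_id="<exporter-history-token>",
    multiple=True,
    slug="acme",
)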
|
||||
apiserver/plane/bgtasks/exporter_expired_task.py (new file, 38 lines)
@@ -0,0 +1,38 @@
|
||||
# Python imports
import boto3
from datetime import timedelta

# Django imports
from django.conf import settings
from django.utils import timezone
from django.db.models import Q

# Third party imports
from celery import shared_task
from botocore.client import Config

# Module imports
from plane.db.models import ExporterHistory
|
||||
|
||||
|
||||
@shared_task
|
||||
def delete_old_s3_link():
|
||||
# Get a list of keys and IDs to process
|
||||
expired_exporter_history = ExporterHistory.objects.filter(
|
||||
Q(url__isnull=False) & Q(created_at__lte=timezone.now() - timedelta(days=8))
|
||||
).values_list("key", "id")
|
||||
|
||||
s3 = boto3.client(
|
||||
"s3",
|
||||
region_name="ap-south-1",
|
||||
aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
|
||||
aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
|
||||
config=Config(signature_version="s3v4"),
|
||||
)
|
||||
|
||||
for file_name, exporter_id in expired_exporter_history:
|
||||
# Delete object from S3
|
||||
if file_name:
|
||||
s3.delete_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key=file_name)
|
||||
|
||||
ExporterHistory.objects.filter(id=exporter_id).update(url=None)
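delete_old_s3_link is written to run on a schedule rather than per request; the actual wiring is not part of this diff, but a typical Celery beat entry would look like the sketch below (the app object, task path, and daily cadence are assumptions):

# Assumed scheduling sketch only; the real beat configuration is not shown here.
from celery.schedules import crontab

app.conf.beat_schedule = {
    "delete-expired-export-links": {
        "task": "plane.bgtasks.exporter_expired_task.delete_old_s3_link",
        "schedule": crontab(hour=0, minute=0),  # once a day at midnight
    },
}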
|
||||
@@ -5,6 +5,7 @@ import requests
# Django imports
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
from django.utils import timezone

# Third Party imports
from celery import shared_task
@@ -20,6 +21,9 @@ from plane.db.models import (
    State,
    Cycle,
    Module,
    IssueSubscriber,
    Notification,
    IssueAssignee,
)
from plane.api.serializers import IssueActivitySerializer

@@ -44,7 +48,7 @@ def track_name(
                field="name",
                project=project,
                workspace=project.workspace,
                comment=f"{actor.email} updated the name to {requested_data.get('name')}",
                comment=f"updated the name to {requested_data.get('name')}",
            )
        )
@@ -66,12 +70,12 @@ def track_parent(
|
||||
issue_id=issue_id,
|
||||
actor=actor,
|
||||
verb="updated",
|
||||
old_value=f"{project.identifier}-{old_parent.sequence_id}",
|
||||
old_value=f"{old_parent.project.identifier}-{old_parent.sequence_id}",
|
||||
new_value=None,
|
||||
field="parent",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} updated the parent issue to None",
|
||||
comment=f"updated the parent issue to None",
|
||||
old_identifier=old_parent.id,
|
||||
new_identifier=None,
|
||||
)
|
||||
@@ -84,14 +88,14 @@ def track_parent(
|
||||
issue_id=issue_id,
|
||||
actor=actor,
|
||||
verb="updated",
|
||||
old_value=f"{project.identifier}-{old_parent.sequence_id}"
|
||||
old_value=f"{old_parent.project.identifier}-{old_parent.sequence_id}"
|
||||
if old_parent is not None
|
||||
else None,
|
||||
new_value=f"{project.identifier}-{new_parent.sequence_id}",
|
||||
new_value=f"{new_parent.project.identifier}-{new_parent.sequence_id}",
|
||||
field="parent",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} updated the parent issue to {new_parent.name}",
|
||||
comment=f"updated the parent issue to {new_parent.name}",
|
||||
old_identifier=old_parent.id if old_parent is not None else None,
|
||||
new_identifier=new_parent.id,
|
||||
)
|
||||
@@ -119,7 +123,7 @@ def track_priority(
|
||||
field="priority",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} updated the priority to None",
|
||||
comment=f"updated the priority to None",
|
||||
)
|
||||
)
|
||||
else:
|
||||
@@ -133,7 +137,7 @@ def track_priority(
|
||||
field="priority",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} updated the priority to {requested_data.get('priority')}",
|
||||
comment=f"updated the priority to {requested_data.get('priority')}",
|
||||
)
|
||||
)
|
||||
|
||||
@@ -161,7 +165,7 @@ def track_state(
|
||||
field="state",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} updated the state to {new_state.name}",
|
||||
comment=f"updated the state to {new_state.name}",
|
||||
old_identifier=old_state.id,
|
||||
new_identifier=new_state.id,
|
||||
)
|
||||
@@ -180,19 +184,24 @@ def track_description(
|
||||
if current_instance.get("description_html") != requested_data.get(
|
||||
"description_html"
|
||||
):
|
||||
issue_activities.append(
|
||||
IssueActivity(
|
||||
issue_id=issue_id,
|
||||
actor=actor,
|
||||
verb="updated",
|
||||
old_value=current_instance.get("description_html"),
|
||||
new_value=requested_data.get("description_html"),
|
||||
field="description",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} updated the description to {requested_data.get('description_html')}",
|
||||
)
|
||||
)
|
||||
last_activity = IssueActivity.objects.filter(issue_id=issue_id).order_by("-created_at").first()
|
||||
if(last_activity is not None and last_activity.field == "description" and actor.id == last_activity.actor_id):
|
||||
last_activity.created_at = timezone.now()
|
||||
last_activity.save(update_fields=["created_at"])
|
||||
else:
|
||||
issue_activities.append(
|
||||
IssueActivity(
|
||||
issue_id=issue_id,
|
||||
actor=actor,
|
||||
verb="updated",
|
||||
old_value=current_instance.get("description_html"),
|
||||
new_value=requested_data.get("description_html"),
|
||||
field="description",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"updated the description to {requested_data.get('description_html')}",
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
# Track changes in issue target date
|
||||
@@ -216,7 +225,7 @@ def track_target_date(
|
||||
field="target_date",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} updated the target date to None",
|
||||
comment=f"updated the target date to None",
|
||||
)
|
||||
)
|
||||
else:
|
||||
@@ -230,7 +239,7 @@ def track_target_date(
|
||||
field="target_date",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} updated the target date to {requested_data.get('target_date')}",
|
||||
comment=f"updated the target date to {requested_data.get('target_date')}",
|
||||
)
|
||||
)
|
||||
|
||||
@@ -256,7 +265,7 @@ def track_start_date(
|
||||
field="start_date",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} updated the start date to None",
|
||||
comment=f"updated the start date to None",
|
||||
)
|
||||
)
|
||||
else:
|
||||
@@ -270,7 +279,7 @@ def track_start_date(
|
||||
field="start_date",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} updated the start date to {requested_data.get('start_date')}",
|
||||
comment=f"updated the start date to {requested_data.get('start_date')}",
|
||||
)
|
||||
)
|
||||
|
||||
@@ -299,7 +308,7 @@ def track_labels(
|
||||
field="labels",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} added label {label.name}",
|
||||
comment=f"added label {label.name}",
|
||||
new_identifier=label.id,
|
||||
old_identifier=None,
|
||||
)
|
||||
@@ -320,7 +329,7 @@ def track_labels(
|
||||
field="labels",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} removed label {label.name}",
|
||||
comment=f"removed label {label.name}",
|
||||
old_identifier=label.id,
|
||||
new_identifier=None,
|
||||
)
|
||||
@@ -349,12 +358,12 @@ def track_assignees(
|
||||
actor=actor,
|
||||
verb="updated",
|
||||
old_value="",
|
||||
new_value=assignee.email,
|
||||
new_value=assignee.display_name,
|
||||
field="assignees",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} added assignee {assignee.email}",
|
||||
new_identifier=actor.id,
|
||||
comment=f"added assignee {assignee.display_name}",
|
||||
new_identifier=assignee.id,
|
||||
)
|
||||
)
|
||||
|
||||
@@ -370,13 +379,13 @@ def track_assignees(
|
||||
issue_id=issue_id,
|
||||
actor=actor,
|
||||
verb="updated",
|
||||
old_value=assignee.email,
|
||||
old_value=assignee.display_name,
|
||||
new_value="",
|
||||
field="assignee",
|
||||
field="assignees",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} removed assignee {assignee.email}",
|
||||
old_identifier=actor.id,
|
||||
comment=f"removed assignee {assignee.display_name}",
|
||||
old_identifier=assignee.id,
|
||||
)
|
||||
)
|
||||
|
||||
@@ -411,11 +420,11 @@ def track_blocks(
|
||||
actor=actor,
|
||||
verb="updated",
|
||||
old_value="",
|
||||
new_value=f"{project.identifier}-{issue.sequence_id}",
|
||||
new_value=f"{issue.project.identifier}-{issue.sequence_id}",
|
||||
field="blocks",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} added blocking issue {project.identifier}-{issue.sequence_id}",
|
||||
comment=f"added blocking issue {project.identifier}-{issue.sequence_id}",
|
||||
new_identifier=issue.id,
|
||||
)
|
||||
)
|
||||
@@ -432,12 +441,12 @@ def track_blocks(
|
||||
issue_id=issue_id,
|
||||
actor=actor,
|
||||
verb="updated",
|
||||
old_value=f"{project.identifier}-{issue.sequence_id}",
|
||||
old_value=f"{issue.project.identifier}-{issue.sequence_id}",
|
||||
new_value="",
|
||||
field="blocks",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} removed blocking issue {project.identifier}-{issue.sequence_id}",
|
||||
comment=f"removed blocking issue {project.identifier}-{issue.sequence_id}",
|
||||
old_identifier=issue.id,
|
||||
)
|
||||
)
|
||||
@@ -473,11 +482,11 @@ def track_blockings(
|
||||
actor=actor,
|
||||
verb="updated",
|
||||
old_value="",
|
||||
new_value=f"{project.identifier}-{issue.sequence_id}",
|
||||
new_value=f"{issue.project.identifier}-{issue.sequence_id}",
|
||||
field="blocking",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} added blocked by issue {project.identifier}-{issue.sequence_id}",
|
||||
comment=f"added blocked by issue {project.identifier}-{issue.sequence_id}",
|
||||
new_identifier=issue.id,
|
||||
)
|
||||
)
|
||||
@@ -494,12 +503,12 @@ def track_blockings(
|
||||
issue_id=issue_id,
|
||||
actor=actor,
|
||||
verb="updated",
|
||||
old_value=f"{project.identifier}-{issue.sequence_id}",
|
||||
old_value=f"{issue.project.identifier}-{issue.sequence_id}",
|
||||
new_value="",
|
||||
field="blocking",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} removed blocked by issue {project.identifier}-{issue.sequence_id}",
|
||||
comment=f"removed blocked by issue {project.identifier}-{issue.sequence_id}",
|
||||
old_identifier=issue.id,
|
||||
)
|
||||
)
|
||||
@@ -513,7 +522,7 @@ def create_issue_activity(
|
||||
issue_id=issue_id,
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} created the issue",
|
||||
comment=f"created the issue",
|
||||
verb="created",
|
||||
actor=actor,
|
||||
)
|
||||
@@ -535,7 +544,7 @@ def track_estimate_points(
|
||||
field="estimate_point",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} updated the estimate point to None",
|
||||
comment=f"updated the estimate point to None",
|
||||
)
|
||||
)
|
||||
else:
|
||||
@@ -549,11 +558,69 @@ def track_estimate_points(
|
||||
field="estimate_point",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} updated the estimate point to {requested_data.get('estimate_point')}",
|
||||
comment=f"updated the estimate point to {requested_data.get('estimate_point')}",
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def track_archive_at(
|
||||
requested_data, current_instance, issue_id, project, actor, issue_activities
|
||||
):
|
||||
if requested_data.get("archived_at") is None:
|
||||
issue_activities.append(
|
||||
IssueActivity(
|
||||
issue_id=issue_id,
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"has restored the issue",
|
||||
verb="updated",
|
||||
actor=actor,
|
||||
field="archived_at",
|
||||
old_value="archive",
|
||||
new_value="restore",
|
||||
)
|
||||
)
|
||||
else:
|
||||
issue_activities.append(
|
||||
IssueActivity(
|
||||
issue_id=issue_id,
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"Plane has archived the issue",
|
||||
verb="updated",
|
||||
actor=actor,
|
||||
field="archived_at",
|
||||
old_value=None,
|
||||
new_value="archive",
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def track_closed_to(
|
||||
requested_data, current_instance, issue_id, project, actor, issue_activities
|
||||
):
|
||||
if requested_data.get("closed_to") is not None:
|
||||
updated_state = State.objects.get(
|
||||
pk=requested_data.get("closed_to"), project=project
|
||||
)
|
||||
|
||||
issue_activities.append(
|
||||
IssueActivity(
|
||||
issue_id=issue_id,
|
||||
actor=actor,
|
||||
verb="updated",
|
||||
old_value=None,
|
||||
new_value=updated_state.name,
|
||||
field="state",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"Plane updated the state to {updated_state.name}",
|
||||
old_identifier=None,
|
||||
new_identifier=updated_state.id,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
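Note: in this diff the "closed_to" key handled by track_closed_to above is produced by the close_old_issues automation task added further down, which passes the primary key of the state the issue was moved to. A sketch of the payload that reaches this handler, based on that task's call site:

# From close_old_issues() in plane/bgtasks/issue_automation_task.py:
requested_data = json.dumps({"closed_to": str(issue.state_id)})
# issue_activity() json.loads() this back into a dict, so
# requested_data.get("closed_to") above is the target state's primary key.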
def update_issue_activity(
|
||||
requested_data, current_instance, issue_id, project, actor, issue_activities
|
||||
):
|
||||
@@ -570,6 +637,8 @@ def update_issue_activity(
|
||||
"blocks_list": track_blocks,
|
||||
"blockers_list": track_blockings,
|
||||
"estimate_point": track_estimate_points,
|
||||
"archived_at": track_archive_at,
|
||||
"closed_to": track_closed_to,
|
||||
}
|
||||
|
||||
requested_data = json.loads(requested_data) if requested_data is not None else None
|
||||
@@ -597,7 +666,7 @@ def delete_issue_activity(
|
||||
IssueActivity(
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} deleted the issue",
|
||||
comment=f"deleted the issue",
|
||||
verb="deleted",
|
||||
actor=actor,
|
||||
field="issue",
|
||||
@@ -618,7 +687,7 @@ def create_comment_activity(
|
||||
issue_id=issue_id,
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} created a comment",
|
||||
comment=f"created a comment",
|
||||
verb="created",
|
||||
actor=actor,
|
||||
field="comment",
|
||||
@@ -643,7 +712,7 @@ def update_comment_activity(
|
||||
issue_id=issue_id,
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} updated a comment",
|
||||
comment=f"updated a comment",
|
||||
verb="updated",
|
||||
actor=actor,
|
||||
field="comment",
|
||||
@@ -664,7 +733,7 @@ def delete_comment_activity(
|
||||
issue_id=issue_id,
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} deleted the comment",
|
||||
comment=f"deleted the comment",
|
||||
verb="deleted",
|
||||
actor=actor,
|
||||
field="comment",
|
||||
@@ -702,7 +771,7 @@ def create_cycle_issue_activity(
|
||||
field="cycles",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} updated cycle from {old_cycle.name} to {new_cycle.name}",
|
||||
comment=f"updated cycle from {old_cycle.name} to {new_cycle.name}",
|
||||
old_identifier=old_cycle.id,
|
||||
new_identifier=new_cycle.id,
|
||||
)
|
||||
@@ -723,7 +792,7 @@ def create_cycle_issue_activity(
|
||||
field="cycles",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} added cycle {cycle.name}",
|
||||
comment=f"added cycle {cycle.name}",
|
||||
new_identifier=cycle.id,
|
||||
)
|
||||
)
|
||||
@@ -752,7 +821,7 @@ def delete_cycle_issue_activity(
|
||||
field="cycles",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} removed this issue from {cycle.name if cycle is not None else None}",
|
||||
comment=f"removed this issue from {cycle.name if cycle is not None else None}",
|
||||
old_identifier=cycle.id if cycle is not None else None,
|
||||
)
|
||||
)
|
||||
@@ -788,7 +857,7 @@ def create_module_issue_activity(
|
||||
field="modules",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} updated module from {old_module.name} to {new_module.name}",
|
||||
comment=f"updated module from {old_module.name} to {new_module.name}",
|
||||
old_identifier=old_module.id,
|
||||
new_identifier=new_module.id,
|
||||
)
|
||||
@@ -808,7 +877,7 @@ def create_module_issue_activity(
|
||||
field="modules",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} added module {module.name}",
|
||||
comment=f"added module {module.name}",
|
||||
new_identifier=module.id,
|
||||
)
|
||||
)
|
||||
@@ -837,7 +906,7 @@ def delete_module_issue_activity(
|
||||
field="modules",
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} removed this issue from {module.name if module is not None else None}",
|
||||
comment=f"removed this issue from {module.name if module is not None else None}",
|
||||
old_identifier=module.id if module is not None else None,
|
||||
)
|
||||
)
|
||||
@@ -856,7 +925,7 @@ def create_link_activity(
|
||||
issue_id=issue_id,
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} created a link",
|
||||
comment=f"created a link",
|
||||
verb="created",
|
||||
actor=actor,
|
||||
field="link",
|
||||
@@ -880,7 +949,7 @@ def update_link_activity(
|
||||
issue_id=issue_id,
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} updated a link",
|
||||
comment=f"updated a link",
|
||||
verb="updated",
|
||||
actor=actor,
|
||||
field="link",
|
||||
@@ -895,15 +964,22 @@ def update_link_activity(
|
||||
def delete_link_activity(
|
||||
requested_data, current_instance, issue_id, project, actor, issue_activities
|
||||
):
|
||||
|
||||
current_instance = (
|
||||
json.loads(current_instance) if current_instance is not None else None
|
||||
)
|
||||
|
||||
issue_activities.append(
|
||||
IssueActivity(
|
||||
issue_id=issue_id,
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} deleted the link",
|
||||
comment=f"deleted the link",
|
||||
verb="deleted",
|
||||
actor=actor,
|
||||
field="link",
|
||||
old_value=current_instance.get("url", ""),
|
||||
new_value=""
|
||||
)
|
||||
)
|
||||
|
||||
@@ -921,11 +997,11 @@ def create_attachment_activity(
|
||||
issue_id=issue_id,
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} created an attachment",
|
||||
comment=f"created an attachment",
|
||||
verb="created",
|
||||
actor=actor,
|
||||
field="attachment",
|
||||
new_value=current_instance.get("access", ""),
|
||||
new_value=current_instance.get("asset", ""),
|
||||
new_identifier=current_instance.get("id", None),
|
||||
)
|
||||
)
|
||||
@@ -939,7 +1015,7 @@ def delete_attachment_activity(
|
||||
issue_id=issue_id,
|
||||
project=project,
|
||||
workspace=project.workspace,
|
||||
comment=f"{actor.email} deleted the attachment",
|
||||
comment=f"deleted the attachment",
|
||||
verb="deleted",
|
||||
actor=actor,
|
||||
field="attachment",
|
||||
@@ -950,7 +1026,13 @@ def delete_attachment_activity(
|
||||
# Receive message from room group
|
||||
@shared_task
|
||||
def issue_activity(
|
||||
type, requested_data, current_instance, issue_id, actor_id, project_id
|
||||
type,
|
||||
requested_data,
|
||||
current_instance,
|
||||
issue_id,
|
||||
actor_id,
|
||||
project_id,
|
||||
subscriber=True,
|
||||
):
|
||||
try:
|
||||
issue_activities = []
|
||||
@@ -958,6 +1040,30 @@ def issue_activity(
|
||||
actor = User.objects.get(pk=actor_id)
|
||||
project = Project.objects.get(pk=project_id)
|
||||
|
||||
if type not in [
|
||||
"cycle.activity.created",
|
||||
"cycle.activity.deleted",
|
||||
"module.activity.created",
|
||||
"module.activity.deleted",
|
||||
]:
|
||||
issue = Issue.objects.filter(pk=issue_id).first()
|
||||
|
||||
if issue is not None:
|
||||
try:
|
||||
issue.updated_at = timezone.now()
|
||||
issue.save(update_fields=["updated_at"])
|
||||
except Exception as e:
|
||||
pass
|
||||
|
||||
if subscriber:
|
||||
# add the user to issue subscriber
|
||||
try:
|
||||
_ = IssueSubscriber.objects.get_or_create(
|
||||
issue_id=issue_id, subscriber=actor
|
||||
)
|
||||
except Exception as e:
|
||||
pass
|
||||
|
||||
ACTIVITY_MAPPER = {
|
||||
"issue.activity.created": create_issue_activity,
|
||||
"issue.activity.updated": update_issue_activity,
|
||||
@@ -992,18 +1098,97 @@ def issue_activity(
|
||||
# Post the updates to segway for integrations and webhooks
|
||||
if len(issue_activities_created):
|
||||
# Don't send activities if the actor is a bot
|
||||
if settings.PROXY_BASE_URL:
|
||||
try:
|
||||
if settings.PROXY_BASE_URL:
|
||||
for issue_activity in issue_activities_created:
|
||||
headers = {"Content-Type": "application/json"}
|
||||
issue_activity_json = json.dumps(
|
||||
IssueActivitySerializer(issue_activity).data,
|
||||
cls=DjangoJSONEncoder,
|
||||
)
|
||||
_ = requests.post(
|
||||
f"{settings.PROXY_BASE_URL}/hooks/workspaces/{str(issue_activity.workspace_id)}/projects/{str(issue_activity.project_id)}/issues/{str(issue_activity.issue_id)}/issue-activity-hooks/",
|
||||
json=issue_activity_json,
|
||||
headers=headers,
|
||||
)
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
|
||||
if type not in [
|
||||
"cycle.activity.created",
|
||||
"cycle.activity.deleted",
|
||||
"module.activity.created",
|
||||
"module.activity.deleted",
|
||||
]:
|
||||
# Create Notifications
|
||||
bulk_notifications = []
|
||||
|
||||
issue_subscribers = list(
|
||||
IssueSubscriber.objects.filter(project=project, issue_id=issue_id)
|
||||
.exclude(subscriber_id=actor_id)
|
||||
.values_list("subscriber", flat=True)
|
||||
)
|
||||
|
||||
issue_assignees = list(
|
||||
IssueAssignee.objects.filter(project=project, issue_id=issue_id)
|
||||
.exclude(assignee_id=actor_id)
|
||||
.values_list("assignee", flat=True)
|
||||
)
|
||||
|
||||
issue_subscribers = issue_subscribers + issue_assignees
|
||||
|
||||
issue = Issue.objects.filter(pk=issue_id).first()
|
||||
|
||||
# Add bot filtering
|
||||
if (
|
||||
issue is not None
|
||||
and issue.created_by_id is not None
|
||||
and not issue.created_by.is_bot
|
||||
and str(issue.created_by_id) != str(actor_id)
|
||||
):
|
||||
issue_subscribers = issue_subscribers + [issue.created_by_id]
|
||||
|
||||
for subscriber in issue_subscribers:
|
||||
for issue_activity in issue_activities_created:
|
||||
headers = {"Content-Type": "application/json"}
|
||||
issue_activity_json = json.dumps(
|
||||
IssueActivitySerializer(issue_activity).data,
|
||||
cls=DjangoJSONEncoder,
|
||||
)
|
||||
_ = requests.post(
|
||||
f"{settings.PROXY_BASE_URL}/hooks/workspaces/{str(issue_activity.workspace_id)}/projects/{str(issue_activity.project_id)}/issues/{str(issue_activity.issue_id)}/issue-activity-hooks/",
|
||||
json=issue_activity_json,
|
||||
headers=headers,
|
||||
bulk_notifications.append(
|
||||
Notification(
|
||||
workspace=project.workspace,
|
||||
sender="in_app:issue_activities",
|
||||
triggered_by_id=actor_id,
|
||||
receiver_id=subscriber,
|
||||
entity_identifier=issue_id,
|
||||
entity_name="issue",
|
||||
project=project,
|
||||
title=issue_activity.comment,
|
||||
data={
|
||||
"issue": {
|
||||
"id": str(issue_id),
|
||||
"name": str(issue.name),
|
||||
"identifier": str(issue.project.identifier),
|
||||
"sequence_id": issue.sequence_id,
|
||||
"state_name": issue.state.name,
|
||||
"state_group": issue.state.group,
|
||||
},
|
||||
"issue_activity": {
|
||||
"id": str(issue_activity.id),
|
||||
"verb": str(issue_activity.verb),
|
||||
"field": str(issue_activity.field),
|
||||
"actor": str(issue_activity.actor_id),
|
||||
"new_value": str(issue_activity.new_value),
|
||||
"old_value": str(issue_activity.old_value),
|
||||
"issue_comment": str(
|
||||
issue_activity.issue_comment.comment_stripped
|
||||
if issue_activity.issue_comment is not None
|
||||
else ""
|
||||
),
|
||||
},
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
# Bulk create notifications
|
||||
Notification.objects.bulk_create(bulk_notifications, batch_size=100)
|
||||
|
||||
return
|
||||
except Exception as e:
|
||||
# Print logs if in DEBUG mode
|
||||
|
||||
157  apiserver/plane/bgtasks/issue_automation_task.py  Normal file
@@ -0,0 +1,157 @@
# Python imports
import json
from datetime import timedelta

# Django imports
from django.utils import timezone
from django.db.models import Q
from django.conf import settings

# Third party imports
from celery import shared_task
from sentry_sdk import capture_exception

# Module imports
from plane.db.models import Issue, Project, State
from plane.bgtasks.issue_activites_task import issue_activity


@shared_task
def archive_and_close_old_issues():
    archive_old_issues()
    close_old_issues()


def archive_old_issues():
    try:
        # Get all the projects whose archive_in is greater than 0
        projects = Project.objects.filter(archive_in__gt=0)

        for project in projects:
            project_id = project.id
            archive_in = project.archive_in

            # Get all the issues whose updated_at in less that the archive_in month
            issues = Issue.objects.filter(
                Q(
                    project=project_id,
                    archived_at__isnull=True,
                    updated_at__lte=(timezone.now() - timedelta(days=archive_in * 30)),
                    state__group__in=["completed", "cancelled"],
                ),
                Q(issue_cycle__isnull=True)
                | (
                    Q(issue_cycle__cycle__end_date__lt=timezone.now().date())
                    & Q(issue_cycle__isnull=False)
                ),
                Q(issue_module__isnull=True)
                | (
                    Q(issue_module__module__target_date__lt=timezone.now().date())
                    & Q(issue_module__isnull=False)
                ),
            ).filter(
                Q(issue_inbox__status=1)
                | Q(issue_inbox__status=-1)
                | Q(issue_inbox__status=2)
                | Q(issue_inbox__isnull=True)
            )

            # Check if Issues
            if issues:
                issues_to_update = []
                for issue in issues:
                    issue.archived_at = timezone.now()
                    issues_to_update.append(issue)

                # Bulk Update the issues and log the activity
                Issue.objects.bulk_update(
                    issues_to_update, ["archived_at"], batch_size=100
                )
                [
                    issue_activity.delay(
                        type="issue.activity.updated",
                        requested_data=json.dumps({"archived_at": str(issue.archived_at)}),
                        actor_id=str(project.created_by_id),
                        issue_id=issue.id,
                        project_id=project_id,
                        current_instance=None,
                        subscriber=False,
                    )
                    for issue in issues_to_update
                ]
        return
    except Exception as e:
        if settings.DEBUG:
            print(e)
        capture_exception(e)
        return


def close_old_issues():
    try:
        # Get all the projects whose close_in is greater than 0
        projects = Project.objects.filter(close_in__gt=0).select_related(
            "default_state"
        )

        for project in projects:
            project_id = project.id
            close_in = project.close_in

            # Get all the issues whose updated_at in less that the close_in month
            issues = Issue.objects.filter(
                Q(
                    project=project_id,
                    archived_at__isnull=True,
                    updated_at__lte=(timezone.now() - timedelta(days=close_in * 30)),
                    state__group__in=["backlog", "unstarted", "started"],
                ),
                Q(issue_cycle__isnull=True)
                | (
                    Q(issue_cycle__cycle__end_date__lt=timezone.now().date())
                    & Q(issue_cycle__isnull=False)
                ),
                Q(issue_module__isnull=True)
                | (
                    Q(issue_module__module__target_date__lt=timezone.now().date())
                    & Q(issue_module__isnull=False)
                ),
            ).filter(
                Q(issue_inbox__status=1)
                | Q(issue_inbox__status=-1)
                | Q(issue_inbox__status=2)
                | Q(issue_inbox__isnull=True)
            )

            # Check if Issues
            if issues:
                if project.default_state is None:
                    close_state = State.objects.filter(group="cancelled").first()
                else:
                    close_state = project.default_state

                issues_to_update = []
                for issue in issues:
                    issue.state = close_state
                    issues_to_update.append(issue)

                # Bulk Update the issues and log the activity
                Issue.objects.bulk_update(issues_to_update, ["state"], batch_size=100)
                [
                    issue_activity.delay(
                        type="issue.activity.updated",
                        requested_data=json.dumps({"closed_to": str(issue.state_id)}),
                        actor_id=str(project.created_by_id),
                        issue_id=issue.id,
                        project_id=project_id,
                        current_instance=None,
                        subscriber=False,
                    )
                    for issue in issues_to_update
                ]
        return
    except Exception as e:
        if settings.DEBUG:
            print(e)
        capture_exception(e)
        return
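Both automation jobs above approximate a month as 30 days when turning the project-level archive_in / close_in settings (0 to 12 months, per the validators in the migration further down) into an updated_at cutoff. A small worked example of that cutoff computation, with an assumed value:

from datetime import timedelta
from django.utils import timezone

archive_in = 3  # months, as stored on the Project model (assumed value)
cutoff = timezone.now() - timedelta(days=archive_in * 30)
# Issues whose updated_at is on or before `cutoff` (roughly 90 days ago)
# become candidates for archiving.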
@@ -1,6 +1,7 @@
import os
from celery import Celery
from plane.settings.redis import redis_instance
from celery.schedules import crontab

# Set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")
@@ -13,5 +14,19 @@ app = Celery("plane")
# pickle the object when using Windows.
app.config_from_object("django.conf:settings", namespace="CELERY")

app.conf.beat_schedule = {
    # Executes every day at 12 AM
    "check-every-day-to-archive-and-close": {
        "task": "plane.bgtasks.issue_automation_task.archive_and_close_old_issues",
        "schedule": crontab(hour=0, minute=0),
    },
    "check-every-day-to-delete_exporter_history": {
        "task": "plane.bgtasks.exporter_expired_task.delete_old_s3_link",
        "schedule": crontab(hour=0, minute=0),
    },
}

# Load task modules from all registered Django app configs.
app.autodiscover_tasks()

app.conf.beat_scheduler = 'django_celery_beat.schedulers.DatabaseScheduler'
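The file both declares an in-code beat_schedule and, on its last line, points beat at django_celery_beat's DatabaseScheduler, which syncs the declared entries into its periodic-task table when beat starts. A quick way to confirm what was registered, assuming the app object lives at plane.celery as is conventional for this layout:

from plane.celery import app

# Prints the two nightly entries declared above (crontab(hour=0, minute=0)).
for name, entry in app.conf.beat_schedule.items():
    print(name, entry["task"], entry["schedule"])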
42  apiserver/plane/db/migrations/0035_auto_20230704_2225.py  Normal file
@@ -0,0 +1,42 @@
# Generated by Django 3.2.19 on 2023-07-04 16:55

from django.db import migrations, models


def update_company_organization_size(apps, schema_editor):
    Model = apps.get_model("db", "Workspace")
    updated_size = []
    for obj in Model.objects.all():
        obj.organization_size = str(obj.company_size)
        updated_size.append(obj)

    Model.objects.bulk_update(updated_size, ["organization_size"], batch_size=100)


class Migration(migrations.Migration):
    dependencies = [
        ("db", "0034_auto_20230628_1046"),
    ]

    operations = [
        migrations.AddField(
            model_name="workspace",
            name="organization_size",
            field=models.CharField(default="2-10", max_length=20),
        ),
        migrations.RunPython(update_company_organization_size),
        migrations.AlterField(
            model_name="workspace",
            name="name",
            field=models.CharField(max_length=80, verbose_name="Workspace Name"),
        ),
        migrations.AlterField(
            model_name="workspace",
            name="slug",
            field=models.SlugField(max_length=48, unique=True),
        ),
        migrations.RemoveField(
            model_name="workspace",
            name="company_size",
        ),
    ]
@@ -0,0 +1,18 @@
# Generated by Django 3.2.19 on 2023-07-05 07:59

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('db', '0035_auto_20230704_2225'),
    ]

    operations = [
        migrations.AlterField(
            model_name='workspace',
            name='organization_size',
            field=models.CharField(max_length=20),
        ),
    ]
@@ -0,0 +1,266 @@
|
||||
# Generated by Django 4.2.3 on 2023-07-19 06:52
|
||||
|
||||
from django.conf import settings
|
||||
import django.core.validators
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import plane.db.models.user
|
||||
import uuid
|
||||
|
||||
|
||||
|
||||
def onboarding_default_steps(apps, schema_editor):
|
||||
default_onboarding_schema = {
|
||||
"workspace_join": True,
|
||||
"profile_complete": True,
|
||||
"workspace_create": True,
|
||||
"workspace_invite": True,
|
||||
}
|
||||
|
||||
Model = apps.get_model("db", "User")
|
||||
updated_user = []
|
||||
for obj in Model.objects.filter(is_onboarded=True):
|
||||
obj.onboarding_step = default_onboarding_schema
|
||||
obj.is_tour_completed = True
|
||||
updated_user.append(obj)
|
||||
|
||||
Model.objects.bulk_update(updated_user, ["onboarding_step", "is_tour_completed"], batch_size=100)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("db", "0036_alter_workspace_organization_size"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="issue",
|
||||
name="archived_at",
|
||||
field=models.DateField(null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="project",
|
||||
name="archive_in",
|
||||
field=models.IntegerField(
|
||||
default=0,
|
||||
validators=[
|
||||
django.core.validators.MinValueValidator(0),
|
||||
django.core.validators.MaxValueValidator(12),
|
||||
],
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="project",
|
||||
name="close_in",
|
||||
field=models.IntegerField(
|
||||
default=0,
|
||||
validators=[
|
||||
django.core.validators.MinValueValidator(0),
|
||||
django.core.validators.MaxValueValidator(12),
|
||||
],
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="project",
|
||||
name="default_state",
|
||||
field=models.ForeignKey(
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="default_state",
|
||||
to="db.state",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="is_tour_completed",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="onboarding_step",
|
||||
field=models.JSONField(default=plane.db.models.user.get_default_onboarding),
|
||||
),
|
||||
migrations.RunPython(onboarding_default_steps),
|
||||
migrations.CreateModel(
|
||||
name="Notification",
|
||||
fields=[
|
||||
(
|
||||
"created_at",
|
||||
models.DateTimeField(auto_now_add=True, verbose_name="Created At"),
|
||||
),
|
||||
(
|
||||
"updated_at",
|
||||
models.DateTimeField(
|
||||
auto_now=True, verbose_name="Last Modified At"
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
db_index=True,
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
unique=True,
|
||||
),
|
||||
),
|
||||
("data", models.JSONField(null=True)),
|
||||
("entity_identifier", models.UUIDField(null=True)),
|
||||
("entity_name", models.CharField(max_length=255)),
|
||||
("title", models.TextField()),
|
||||
("message", models.JSONField(null=True)),
|
||||
("message_html", models.TextField(blank=True, default="<p></p>")),
|
||||
("message_stripped", models.TextField(blank=True, null=True)),
|
||||
("sender", models.CharField(max_length=255)),
|
||||
("read_at", models.DateTimeField(null=True)),
|
||||
("snoozed_till", models.DateTimeField(null=True)),
|
||||
("archived_at", models.DateTimeField(null=True)),
|
||||
(
|
||||
"created_by",
|
||||
models.ForeignKey(
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="%(class)s_created_by",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
verbose_name="Created By",
|
||||
),
|
||||
),
|
||||
(
|
||||
"project",
|
||||
models.ForeignKey(
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="notifications",
|
||||
to="db.project",
|
||||
),
|
||||
),
|
||||
(
|
||||
"receiver",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="received_notifications",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"triggered_by",
|
||||
models.ForeignKey(
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="triggered_notifications",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"updated_by",
|
||||
models.ForeignKey(
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="%(class)s_updated_by",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
verbose_name="Last Modified By",
|
||||
),
|
||||
),
|
||||
(
|
||||
"workspace",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="notifications",
|
||||
to="db.workspace",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Notification",
|
||||
"verbose_name_plural": "Notifications",
|
||||
"db_table": "notifications",
|
||||
"ordering": ("-created_at",),
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="IssueSubscriber",
|
||||
fields=[
|
||||
(
|
||||
"created_at",
|
||||
models.DateTimeField(auto_now_add=True, verbose_name="Created At"),
|
||||
),
|
||||
(
|
||||
"updated_at",
|
||||
models.DateTimeField(
|
||||
auto_now=True, verbose_name="Last Modified At"
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
db_index=True,
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
unique=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"created_by",
|
||||
models.ForeignKey(
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="%(class)s_created_by",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
verbose_name="Created By",
|
||||
),
|
||||
),
|
||||
(
|
||||
"issue",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="issue_subscribers",
|
||||
to="db.issue",
|
||||
),
|
||||
),
|
||||
(
|
||||
"project",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="project_%(class)s",
|
||||
to="db.project",
|
||||
),
|
||||
),
|
||||
(
|
||||
"subscriber",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="issue_subscribers",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"updated_by",
|
||||
models.ForeignKey(
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="%(class)s_updated_by",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
verbose_name="Last Modified By",
|
||||
),
|
||||
),
|
||||
(
|
||||
"workspace",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="workspace_%(class)s",
|
||||
to="db.workspace",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Issue Subscriber",
|
||||
"verbose_name_plural": "Issue Subscribers",
|
||||
"db_table": "issue_subscribers",
|
||||
"ordering": ("-created_at",),
|
||||
"unique_together": {("issue", "subscriber")},
|
||||
},
|
||||
),
|
||||
]
|
||||
35
apiserver/plane/db/migrations/0038_auto_20230720_1505.py
Normal file
35
apiserver/plane/db/migrations/0038_auto_20230720_1505.py
Normal file
@@ -0,0 +1,35 @@
|
# Generated by Django 4.2.3 on 2023-07-20 09:35

from django.db import migrations, models


def restructure_theming(apps, schema_editor):
    Model = apps.get_model("db", "User")
    updated_user = []
    for obj in Model.objects.exclude(theme={}).all():
        current_theme = obj.theme
        updated_theme = {
            "primary": current_theme.get("accent", ""),
            "background": current_theme.get("bgBase", ""),
            "sidebarBackground": current_theme.get("sidebar", ""),
            "text": current_theme.get("textBase", ""),
            "sidebarText": current_theme.get("textBase", ""),
            "palette": f"""{current_theme.get("bgBase","")},{current_theme.get("textBase", "")},{current_theme.get("accent", "")},{current_theme.get("sidebar","")},{current_theme.get("textBase", "")}""",
            "darkPalette": current_theme.get("darkPalette", "")
        }
        obj.theme = updated_theme
        updated_user.append(obj)

    Model.objects.bulk_update(
        updated_user, ["theme"], batch_size=100
    )


class Migration(migrations.Migration):
    dependencies = [
        ("db", "0037_issue_archived_at_project_archive_in_and_more"),
    ]

    operations = [
        migrations.RunPython(restructure_theming)
    ]
97  apiserver/plane/db/migrations/0039_auto_20230723_2203.py  Normal file
@@ -0,0 +1,97 @@
|
||||
# Generated by Django 4.2.3 on 2023-07-23 16:33
|
||||
import random
|
||||
from django.db import migrations, models
|
||||
import plane.db.models.workspace
|
||||
|
||||
|
||||
def rename_field(apps, schema_editor):
|
||||
Model = apps.get_model("db", "IssueActivity")
|
||||
updated_activity = []
|
||||
for obj in Model.objects.filter(field="assignee"):
|
||||
obj.field = "assignees"
|
||||
updated_activity.append(obj)
|
||||
|
||||
Model.objects.bulk_update(updated_activity, ["field"], batch_size=100)
|
||||
|
||||
|
||||
def update_workspace_member_props(apps, schema_editor):
|
||||
Model = apps.get_model("db", "WorkspaceMember")
|
||||
|
||||
updated_workspace_member = []
|
||||
|
||||
for obj in Model.objects.all():
|
||||
if obj.view_props is None:
|
||||
obj.view_props = {
|
||||
"filters": {"type": None},
|
||||
"groupByProperty": None,
|
||||
"issueView": "list",
|
||||
"orderBy": "-created_at",
|
||||
"properties": {
|
||||
"assignee": True,
|
||||
"due_date": True,
|
||||
"key": True,
|
||||
"labels": True,
|
||||
"priority": True,
|
||||
"state": True,
|
||||
"sub_issue_count": True,
|
||||
"attachment_count": True,
|
||||
"link": True,
|
||||
"estimate": True,
|
||||
"created_on": True,
|
||||
"updated_on": True,
|
||||
},
|
||||
"showEmptyGroups": True,
|
||||
}
|
||||
else:
|
||||
current_view_props = obj.view_props
|
||||
obj.view_props = {
|
||||
"filters": {"type": None},
|
||||
"groupByProperty": None,
|
||||
"issueView": "list",
|
||||
"orderBy": "-created_at",
|
||||
"showEmptyGroups": True,
|
||||
"properties": current_view_props,
|
||||
}
|
||||
|
||||
updated_workspace_member.append(obj)
|
||||
|
||||
Model.objects.bulk_update(updated_workspace_member, ["view_props"], batch_size=100)
|
||||
|
||||
|
||||
def update_project_member_sort_order(apps, schema_editor):
|
||||
Model = apps.get_model("db", "ProjectMember")
|
||||
|
||||
updated_project_members = []
|
||||
|
||||
for obj in Model.objects.all():
|
||||
obj.sort_order = random.randint(1, 65536)
|
||||
updated_project_members.append(obj)
|
||||
|
||||
Model.objects.bulk_update(updated_project_members, ["sort_order"], batch_size=100)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("db", "0038_auto_20230720_1505"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(rename_field),
|
||||
migrations.RunPython(update_workspace_member_props),
|
||||
migrations.AlterField(
|
||||
model_name='workspacemember',
|
||||
name='view_props',
|
||||
field=models.JSONField(default=plane.db.models.workspace.get_default_props),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workspacemember',
|
||||
name='default_props',
|
||||
field=models.JSONField(default=plane.db.models.workspace.get_default_props),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='projectmember',
|
||||
name='sort_order',
|
||||
field=models.FloatField(default=65535),
|
||||
),
|
||||
migrations.RunPython(update_project_member_sort_order),
|
||||
]
|
||||
@@ -0,0 +1,81 @@
|
||||
# Generated by Django 4.2.3 on 2023-08-01 06:02
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import plane.db.models.project
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('db', '0039_auto_20230723_2203'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='projectmember',
|
||||
name='preferences',
|
||||
field=models.JSONField(default=plane.db.models.project.get_default_preferences),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='cover_image',
|
||||
field=models.URLField(blank=True, max_length=800, null=True),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='IssueReaction',
|
||||
fields=[
|
||||
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
|
||||
('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
|
||||
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
|
||||
('reaction', models.CharField(max_length=20)),
|
||||
('actor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_reactions', to=settings.AUTH_USER_MODEL)),
|
||||
('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
|
||||
('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_reactions', to='db.issue')),
|
||||
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project')),
|
||||
('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
|
||||
('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Issue Reaction',
|
||||
'verbose_name_plural': 'Issue Reactions',
|
||||
'db_table': 'issue_reactions',
|
||||
'ordering': ('-created_at',),
|
||||
'unique_together': {('issue', 'actor', 'reaction')},
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='CommentReaction',
|
||||
fields=[
|
||||
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
|
||||
('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
|
||||
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
|
||||
('reaction', models.CharField(max_length=20)),
|
||||
('actor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comment_reactions', to=settings.AUTH_USER_MODEL)),
|
||||
('comment', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comment_reactions', to='db.issuecomment')),
|
||||
('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
|
||||
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project')),
|
||||
('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
|
||||
('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Comment Reaction',
|
||||
'verbose_name_plural': 'Comment Reactions',
|
||||
'db_table': 'comment_reactions',
|
||||
'ordering': ('-created_at',),
|
||||
'unique_together': {('comment', 'actor', 'reaction')},
|
||||
},
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='project',
|
||||
name='identifier',
|
||||
field=models.CharField(max_length=12, verbose_name='Project Identifier'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='projectidentifier',
|
||||
name='name',
|
||||
field=models.CharField(max_length=12),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,243 @@
|
||||
# Generated by Django 4.2.3 on 2023-08-14 07:12
|
||||
|
||||
from django.conf import settings
|
||||
import django.contrib.postgres.fields
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import plane.db.models.exporter
|
||||
import plane.db.models.project
|
||||
import uuid
|
||||
import random
|
||||
import string
|
||||
|
||||
def generate_display_name(apps, schema_editor):
|
||||
UserModel = apps.get_model("db", "User")
|
||||
updated_users = []
|
||||
for obj in UserModel.objects.all():
|
||||
obj.display_name = (
|
||||
obj.email.split("@")[0]
|
||||
if len(obj.email.split("@"))
|
||||
else "".join(random.choice(string.ascii_letters) for _ in range(6))
|
||||
)
|
||||
updated_users.append(obj)
|
||||
UserModel.objects.bulk_update(updated_users, ["display_name"], batch_size=100)
|
||||
|
||||
|
||||
def rectify_field_issue_activity(apps, schema_editor):
|
||||
Model = apps.get_model("db", "IssueActivity")
|
||||
updated_activity = []
|
||||
for obj in Model.objects.filter(field="assignee"):
|
||||
obj.field = "assignees"
|
||||
updated_activity.append(obj)
|
||||
|
||||
Model.objects.bulk_update(updated_activity, ["field"], batch_size=100)
|
||||
|
||||
|
||||
def update_assignee_issue_activity(apps, schema_editor):
|
||||
Model = apps.get_model("db", "IssueActivity")
|
||||
updated_activity = []
|
||||
|
||||
# Get all the users
|
||||
User = apps.get_model("db", "User")
|
||||
users = User.objects.values("id", "email", "display_name")
|
||||
|
||||
for obj in Model.objects.filter(field="assignees"):
|
||||
if bool(obj.new_value) and not bool(obj.old_value):
|
||||
# Get user from list
|
||||
assigned_user = [
|
||||
user for user in users if user.get("email") == obj.new_value
|
||||
]
|
||||
if assigned_user:
|
||||
obj.new_value = assigned_user[0].get("display_name")
|
||||
obj.new_identifier = assigned_user[0].get("id")
|
||||
# Update the comment
|
||||
words = obj.comment.split()
|
||||
words[-1] = assigned_user[0].get("display_name")
|
||||
obj.comment = " ".join(words)
|
||||
|
||||
if bool(obj.old_value) and not bool(obj.new_value):
|
||||
# Get user from list
|
||||
assigned_user = [
|
||||
user for user in users if user.get("email") == obj.old_value
|
||||
]
|
||||
if assigned_user:
|
||||
obj.old_value = assigned_user[0].get("display_name")
|
||||
obj.old_identifier = assigned_user[0].get("id")
|
||||
# Update the comment
|
||||
words = obj.comment.split()
|
||||
words[-1] = assigned_user[0].get("display_name")
|
||||
obj.comment = " ".join(words)
|
||||
|
||||
updated_activity.append(obj)
|
||||
|
||||
Model.objects.bulk_update(
|
||||
updated_activity,
|
||||
["old_value", "new_value", "old_identifier", "new_identifier", "comment"],
|
||||
batch_size=200,
|
||||
)
|
||||
|
||||
|
||||
def update_name_activity(apps, schema_editor):
|
||||
Model = apps.get_model("db", "IssueActivity")
|
||||
update_activity = []
|
||||
for obj in Model.objects.filter(field="name"):
|
||||
obj.comment = obj.comment.replace("start date", "name")
|
||||
update_activity.append(obj)
|
||||
|
||||
Model.objects.bulk_update(update_activity, ["comment"], batch_size=1000)
|
||||
|
||||
|
||||
def random_cycle_order(apps, schema_editor):
|
||||
CycleModel = apps.get_model("db", "Cycle")
|
||||
updated_cycles = []
|
||||
for obj in CycleModel.objects.all():
|
||||
obj.sort_order = random.randint(1, 65536)
|
||||
updated_cycles.append(obj)
|
||||
CycleModel.objects.bulk_update(updated_cycles, ["sort_order"], batch_size=100)
|
||||
|
||||
|
||||
def random_module_order(apps, schema_editor):
|
||||
ModuleModel = apps.get_model("db", "Module")
|
||||
updated_modules = []
|
||||
for obj in ModuleModel.objects.all():
|
||||
obj.sort_order = random.randint(1, 65536)
|
||||
updated_modules.append(obj)
|
||||
ModuleModel.objects.bulk_update(updated_modules, ["sort_order"], batch_size=100)
|
||||
|
||||
|
||||
def update_user_issue_properties(apps, schema_editor):
|
||||
IssuePropertyModel = apps.get_model("db", "IssueProperty")
|
||||
updated_issue_properties = []
|
||||
for obj in IssuePropertyModel.objects.all():
|
||||
obj.properties["start_date"] = True
|
||||
updated_issue_properties.append(obj)
|
||||
IssuePropertyModel.objects.bulk_update(
|
||||
updated_issue_properties, ["properties"], batch_size=100
|
||||
)
|
||||
|
||||
|
||||
def workspace_member_properties(apps, schema_editor):
|
||||
WorkspaceMemberModel = apps.get_model("db", "WorkspaceMember")
|
||||
updated_workspace_members = []
|
||||
for obj in WorkspaceMemberModel.objects.all():
|
||||
obj.view_props["properties"]["start_date"] = True
|
||||
obj.default_props["properties"]["start_date"] = True
|
||||
updated_workspace_members.append(obj)
|
||||
|
||||
WorkspaceMemberModel.objects.bulk_update(
|
||||
updated_workspace_members, ["view_props", "default_props"], batch_size=100
|
||||
)
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('db', '0040_projectmember_preferences_user_cover_image_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='cycle',
|
||||
name='sort_order',
|
||||
field=models.FloatField(default=65535),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='issuecomment',
|
||||
name='access',
|
||||
field=models.CharField(choices=[('INTERNAL', 'INTERNAL'), ('EXTERNAL', 'EXTERNAL')], default='INTERNAL', max_length=100),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='module',
|
||||
name='sort_order',
|
||||
field=models.FloatField(default=65535),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='display_name',
|
||||
field=models.CharField(default='', max_length=255),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='ExporterHistory',
|
||||
fields=[
|
||||
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
|
||||
('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
|
||||
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
|
||||
('project', django.contrib.postgres.fields.ArrayField(base_field=models.UUIDField(default=uuid.uuid4), blank=True, null=True, size=None)),
|
||||
('provider', models.CharField(choices=[('json', 'json'), ('csv', 'csv'), ('xlsx', 'xlsx')], max_length=50)),
|
||||
('status', models.CharField(choices=[('queued', 'Queued'), ('processing', 'Processing'), ('completed', 'Completed'), ('failed', 'Failed')], default='queued', max_length=50)),
|
||||
('reason', models.TextField(blank=True)),
|
||||
('key', models.TextField(blank=True)),
|
||||
('url', models.URLField(blank=True, max_length=800, null=True)),
|
||||
('token', models.CharField(default=plane.db.models.exporter.generate_token, max_length=255, unique=True)),
|
||||
('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
|
||||
('initiated_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_exporters', to=settings.AUTH_USER_MODEL)),
|
||||
('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
|
||||
('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_exporters', to='db.workspace')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Exporter',
|
||||
'verbose_name_plural': 'Exporters',
|
||||
'db_table': 'exporters',
|
||||
'ordering': ('-created_at',),
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='ProjectDeployBoard',
|
||||
fields=[
|
||||
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
|
||||
('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
|
||||
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
|
||||
('anchor', models.CharField(db_index=True, default=plane.db.models.project.get_anchor, max_length=255, unique=True)),
|
||||
('comments', models.BooleanField(default=False)),
|
||||
('reactions', models.BooleanField(default=False)),
|
||||
('votes', models.BooleanField(default=False)),
|
||||
('views', models.JSONField(default=plane.db.models.project.get_default_views)),
|
||||
('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
|
||||
('inbox', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='bord_inbox', to='db.inbox')),
|
||||
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project')),
|
||||
('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
|
||||
('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Project Deploy Board',
|
||||
'verbose_name_plural': 'Project Deploy Boards',
|
||||
'db_table': 'project_deploy_boards',
|
||||
'ordering': ('-created_at',),
|
||||
'unique_together': {('project', 'anchor')},
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='IssueVote',
|
||||
fields=[
|
||||
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
|
||||
('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
|
||||
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
|
||||
('vote', models.IntegerField(choices=[(-1, 'DOWNVOTE'), (1, 'UPVOTE')])),
|
||||
('actor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='votes', to=settings.AUTH_USER_MODEL)),
|
||||
('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
|
||||
('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='votes', to='db.issue')),
|
||||
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project')),
|
||||
('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
|
||||
('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Issue Vote',
|
||||
'verbose_name_plural': 'Issue Votes',
|
||||
'db_table': 'issue_votes',
|
||||
'ordering': ('-created_at',),
|
||||
'unique_together': {('issue', 'actor')},
|
||||
},
|
||||
),
|
||||
        migrations.AlterField(
            model_name='modulelink',
            name='title',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.RunPython(generate_display_name),
        migrations.RunPython(rectify_field_issue_activity),
        migrations.RunPython(update_assignee_issue_activity),
        migrations.RunPython(update_name_activity),
        migrations.RunPython(random_cycle_order),
        migrations.RunPython(random_module_order),
        migrations.RunPython(update_user_issue_properties),
        migrations.RunPython(workspace_member_properties),
    ]
@@ -18,6 +18,7 @@ from .project import (
    ProjectMemberInvite,
    ProjectIdentifier,
    ProjectFavorite,
    ProjectDeployBoard,
)

from .issue import (
@@ -33,6 +34,10 @@ from .issue import (
    IssueLink,
    IssueSequence,
    IssueAttachment,
    IssueSubscriber,
    IssueReaction,
    CommentReaction,
    IssueVote,
)

from .asset import FileAsset
@@ -66,4 +71,9 @@ from .page import Page, PageBlock, PageFavorite, PageLabel
from .estimate import Estimate, EstimatePoint

from .inbox import Inbox, InboxIssue

from .analytic import AnalyticView

from .notification import Notification

from .exporter import ExporterHistory
@@ -17,6 +17,7 @@ class Cycle(ProjectBaseModel):
        related_name="owned_by_cycle",
    )
    view_props = models.JSONField(default=dict)
    sort_order = models.FloatField(default=65535)

    class Meta:
        verbose_name = "Cycle"
@@ -24,6 +25,17 @@ class Cycle(ProjectBaseModel):
        db_table = "cycles"
        ordering = ("-created_at",)

    def save(self, *args, **kwargs):
        if self._state.adding:
            smallest_sort_order = Cycle.objects.filter(
                project=self.project
            ).aggregate(smallest=models.Min("sort_order"))["smallest"]

            if smallest_sort_order is not None:
                self.sort_order = smallest_sort_order - 10000

        super(Cycle, self).save(*args, **kwargs)

    def __str__(self):
        """Return name of the cycle"""
        return f"{self.name} <{self.project.name}>"
apiserver/plane/db/models/exporter.py (new file, 56 lines)
@@ -0,0 +1,56 @@
import uuid

# Python imports
from uuid import uuid4

# Django imports
from django.db import models
from django.conf import settings
from django.contrib.postgres.fields import ArrayField

# Module imports
from . import BaseModel

def generate_token():
    return uuid4().hex

class ExporterHistory(BaseModel):
    workspace = models.ForeignKey(
        "db.WorkSpace", on_delete=models.CASCADE, related_name="workspace_exporters"
    )
    project = ArrayField(models.UUIDField(default=uuid.uuid4), blank=True, null=True)
    provider = models.CharField(
        max_length=50,
        choices=(
            ("json", "json"),
            ("csv", "csv"),
            ("xlsx", "xlsx"),
        ),
    )
    status = models.CharField(
        max_length=50,
        choices=(
            ("queued", "Queued"),
            ("processing", "Processing"),
            ("completed", "Completed"),
            ("failed", "Failed"),
        ),
        default="queued",
    )
    reason = models.TextField(blank=True)
    key = models.TextField(blank=True)
    url = models.URLField(max_length=800, blank=True, null=True)
    token = models.CharField(max_length=255, default=generate_token, unique=True)
    initiated_by = models.ForeignKey(
        settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name="workspace_exporters"
    )

    class Meta:
        verbose_name = "Exporter"
        verbose_name_plural = "Exporters"
        db_table = "exporters"
        ordering = ("-created_at",)

    def __str__(self):
        """Return name of the service"""
        return f"{self.provider} <{self.workspace.name}>"
@@ -28,6 +28,7 @@ class IssueManager(models.Manager):
|
||||
| models.Q(issue_inbox__status=2)
|
||||
| models.Q(issue_inbox__isnull=True)
|
||||
)
|
||||
.exclude(archived_at__isnull=False)
|
||||
)
|
||||
|
||||
|
||||
@@ -81,6 +82,7 @@ class Issue(ProjectBaseModel):
|
||||
)
|
||||
sort_order = models.FloatField(default=65535)
|
||||
completed_at = models.DateTimeField(null=True)
|
||||
archived_at = models.DateField(null=True)
|
||||
|
||||
objects = models.Manager()
|
||||
issue_objects = IssueManager()
|
||||
@@ -106,11 +108,7 @@ class Issue(ProjectBaseModel):
|
||||
~models.Q(name="Triage"), project=self.project
|
||||
).first()
|
||||
self.state = random_state
|
||||
if random_state.group == "started":
|
||||
self.start_date = timezone.now().date()
|
||||
else:
|
||||
if default_state.group == "started":
|
||||
self.start_date = timezone.now().date()
|
||||
self.state = default_state
|
||||
except ImportError:
|
||||
pass
|
||||
@@ -125,8 +123,6 @@ class Issue(ProjectBaseModel):
|
||||
PageBlock.objects.filter(issue_id=self.id).filter().update(
|
||||
completed_at=timezone.now()
|
||||
)
|
||||
elif self.state.group == "started":
|
||||
self.start_date = timezone.now().date()
|
||||
else:
|
||||
PageBlock.objects.filter(issue_id=self.id).filter().update(
|
||||
completed_at=None
|
||||
@@ -151,9 +147,6 @@ class Issue(ProjectBaseModel):
|
||||
if largest_sort_order is not None:
|
||||
self.sort_order = largest_sort_order + 10000
|
||||
|
||||
# If adding it to started state
|
||||
if self.state.group == "started":
|
||||
self.start_date = timezone.now().date()
|
||||
# Strip the html tags using html parser
|
||||
self.description_stripped = (
|
||||
None
|
||||
@@ -207,7 +200,7 @@ class IssueAssignee(ProjectBaseModel):
|
||||
|
||||
|
||||
class IssueLink(ProjectBaseModel):
|
||||
title = models.CharField(max_length=255, null=True)
|
||||
title = models.CharField(max_length=255, null=True, blank=True)
|
||||
url = models.URLField()
|
||||
issue = models.ForeignKey(
|
||||
"db.Issue", on_delete=models.CASCADE, related_name="issue_link"
|
||||
@@ -308,6 +301,14 @@ class IssueComment(ProjectBaseModel):
|
||||
related_name="comments",
|
||||
null=True,
|
||||
)
|
||||
access = models.CharField(
|
||||
choices=(
|
||||
("INTERNAL", "INTERNAL"),
|
||||
("EXTERNAL", "EXTERNAL"),
|
||||
),
|
||||
default="INTERNAL",
|
||||
max_length=100,
|
||||
)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
self.comment_stripped = (
|
||||
@@ -401,6 +402,93 @@ class IssueSequence(ProjectBaseModel):
|
||||
ordering = ("-created_at",)
|
||||
|
||||
|
||||
class IssueSubscriber(ProjectBaseModel):
|
||||
issue = models.ForeignKey(
|
||||
Issue, on_delete=models.CASCADE, related_name="issue_subscribers"
|
||||
)
|
||||
subscriber = models.ForeignKey(
|
||||
settings.AUTH_USER_MODEL,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="issue_subscribers",
|
||||
)
|
||||
|
||||
class Meta:
|
||||
unique_together = ["issue", "subscriber"]
|
||||
verbose_name = "Issue Subscriber"
|
||||
verbose_name_plural = "Issue Subscribers"
|
||||
db_table = "issue_subscribers"
|
||||
ordering = ("-created_at",)
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.issue.name} {self.subscriber.email}"
|
||||
|
||||
|
||||
class IssueReaction(ProjectBaseModel):
|
||||
actor = models.ForeignKey(
|
||||
settings.AUTH_USER_MODEL,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="issue_reactions",
|
||||
)
|
||||
issue = models.ForeignKey(
|
||||
Issue, on_delete=models.CASCADE, related_name="issue_reactions"
|
||||
)
|
||||
reaction = models.CharField(max_length=20)
|
||||
|
||||
class Meta:
|
||||
unique_together = ["issue", "actor", "reaction"]
|
||||
verbose_name = "Issue Reaction"
|
||||
verbose_name_plural = "Issue Reactions"
|
||||
db_table = "issue_reactions"
|
||||
ordering = ("-created_at",)
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.issue.name} {self.actor.email}"
|
||||
|
||||
|
||||
class CommentReaction(ProjectBaseModel):
|
||||
actor = models.ForeignKey(
|
||||
settings.AUTH_USER_MODEL,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="comment_reactions",
|
||||
)
|
||||
comment = models.ForeignKey(
|
||||
IssueComment, on_delete=models.CASCADE, related_name="comment_reactions"
|
||||
)
|
||||
reaction = models.CharField(max_length=20)
|
||||
|
||||
class Meta:
|
||||
unique_together = ["comment", "actor", "reaction"]
|
||||
verbose_name = "Comment Reaction"
|
||||
verbose_name_plural = "Comment Reactions"
|
||||
db_table = "comment_reactions"
|
||||
ordering = ("-created_at",)
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.issue.name} {self.actor.email}"
|
||||
|
||||
|
||||
class IssueVote(ProjectBaseModel):
|
||||
issue = models.ForeignKey(Issue, on_delete=models.CASCADE, related_name="votes")
|
||||
actor = models.ForeignKey(
|
||||
settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name="votes"
|
||||
)
|
||||
vote = models.IntegerField(
|
||||
choices=(
|
||||
(-1, "DOWNVOTE"),
|
||||
(1, "UPVOTE"),
|
||||
)
|
||||
)
|
||||
class Meta:
|
||||
unique_together = ["issue", "actor"]
|
||||
verbose_name = "Issue Vote"
|
||||
verbose_name_plural = "Issue Votes"
|
||||
db_table = "issue_votes"
|
||||
ordering = ("-created_at",)
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.issue.name} {self.actor.email}"
|
||||
|
||||
|
||||
# TODO: Find a better method to save the model
|
||||
@receiver(post_save, sender=Issue)
|
||||
def create_issue_sequence(sender, instance, created, **kwargs):
|
||||
|
||||
@@ -40,6 +40,7 @@ class Module(ProjectBaseModel):
|
||||
through_fields=("module", "member"),
|
||||
)
|
||||
view_props = models.JSONField(default=dict)
|
||||
sort_order = models.FloatField(default=65535)
|
||||
|
||||
class Meta:
|
||||
unique_together = ["name", "project"]
|
||||
@@ -48,6 +49,17 @@ class Module(ProjectBaseModel):
|
||||
db_table = "modules"
|
||||
ordering = ("-created_at",)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
if self._state.adding:
|
||||
smallest_sort_order = Module.objects.filter(
|
||||
project=self.project
|
||||
).aggregate(smallest=models.Min("sort_order"))["smallest"]
|
||||
|
||||
if smallest_sort_order is not None:
|
||||
self.sort_order = smallest_sort_order - 10000
|
||||
|
||||
super(Module, self).save(*args, **kwargs)
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.name} {self.start_date} {self.target_date}"
|
||||
|
||||
@@ -86,7 +98,7 @@ class ModuleIssue(ProjectBaseModel):
|
||||
|
||||
|
||||
class ModuleLink(ProjectBaseModel):
|
||||
title = models.CharField(max_length=255, null=True)
|
||||
title = models.CharField(max_length=255, blank=True, null=True)
|
||||
url = models.URLField()
|
||||
module = models.ForeignKey(
|
||||
Module, on_delete=models.CASCADE, related_name="link_module"
|
||||
|
||||
apiserver/plane/db/models/notification.py (new file, 37 lines)
@@ -0,0 +1,37 @@
# Django imports
from django.db import models

# Third party imports
from .base import BaseModel


class Notification(BaseModel):
    workspace = models.ForeignKey(
        "db.Workspace", related_name="notifications", on_delete=models.CASCADE
    )
    project = models.ForeignKey(
        "db.Project", related_name="notifications", on_delete=models.CASCADE, null=True
    )
    data = models.JSONField(null=True)
    entity_identifier = models.UUIDField(null=True)
    entity_name = models.CharField(max_length=255)
    title = models.TextField()
    message = models.JSONField(null=True)
    message_html = models.TextField(blank=True, default="<p></p>")
    message_stripped = models.TextField(blank=True, null=True)
    sender = models.CharField(max_length=255)
    triggered_by = models.ForeignKey("db.User", related_name="triggered_notifications", on_delete=models.SET_NULL, null=True)
    receiver = models.ForeignKey("db.User", related_name="received_notifications", on_delete=models.CASCADE)
    read_at = models.DateTimeField(null=True)
    snoozed_till = models.DateTimeField(null=True)
    archived_at = models.DateTimeField(null=True)

    class Meta:
        verbose_name = "Notification"
        verbose_name_plural = "Notifications"
        db_table = "notifications"
        ordering = ("-created_at",)

    def __str__(self):
        """Return name of the notifications"""
        return f"{self.receiver.email} <{self.workspace.name}>"
@@ -1,9 +1,13 @@
|
||||
# Python imports
|
||||
from uuid import uuid4
|
||||
|
||||
# Django imports
|
||||
from django.db import models
|
||||
from django.conf import settings
|
||||
from django.template.defaultfilters import slugify
|
||||
from django.db.models.signals import post_save
|
||||
from django.dispatch import receiver
|
||||
from django.core.validators import MinValueValidator, MaxValueValidator
|
||||
|
||||
# Module imports
|
||||
from plane.db.mixins import AuditModel
|
||||
@@ -31,6 +35,10 @@ def get_default_props():
|
||||
}
|
||||
|
||||
|
||||
def get_default_preferences():
|
||||
return {"pages": {"block_display": True}}
|
||||
|
||||
|
||||
class Project(BaseModel):
|
||||
NETWORK_CHOICES = ((0, "Secret"), (2, "Public"))
|
||||
name = models.CharField(max_length=255, verbose_name="Project Name")
|
||||
@@ -46,7 +54,7 @@ class Project(BaseModel):
|
||||
"db.WorkSpace", on_delete=models.CASCADE, related_name="workspace_project"
|
||||
)
|
||||
identifier = models.CharField(
|
||||
max_length=5,
|
||||
max_length=12,
|
||||
verbose_name="Project Identifier",
|
||||
)
|
||||
default_assignee = models.ForeignKey(
|
||||
@@ -74,6 +82,15 @@ class Project(BaseModel):
|
||||
estimate = models.ForeignKey(
|
||||
"db.Estimate", on_delete=models.SET_NULL, related_name="projects", null=True
|
||||
)
|
||||
archive_in = models.IntegerField(
|
||||
default=0, validators=[MinValueValidator(0), MaxValueValidator(12)]
|
||||
)
|
||||
close_in = models.IntegerField(
|
||||
default=0, validators=[MinValueValidator(0), MaxValueValidator(12)]
|
||||
)
|
||||
default_state = models.ForeignKey(
|
||||
"db.State", on_delete=models.SET_NULL, null=True, related_name="default_state"
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
"""Return name of the project"""
|
||||
@@ -137,6 +154,20 @@ class ProjectMember(ProjectBaseModel):
|
||||
role = models.PositiveSmallIntegerField(choices=ROLE_CHOICES, default=10)
|
||||
view_props = models.JSONField(default=get_default_props)
|
||||
default_props = models.JSONField(default=get_default_props)
|
||||
preferences = models.JSONField(default=get_default_preferences)
|
||||
sort_order = models.FloatField(default=65535)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
if self._state.adding:
|
||||
smallest_sort_order = ProjectMember.objects.filter(
|
||||
workspace_id=self.project.workspace_id, member=self.member
|
||||
).aggregate(smallest=models.Min("sort_order"))["smallest"]
|
||||
|
||||
# Project ordering
|
||||
if smallest_sort_order is not None:
|
||||
self.sort_order = smallest_sort_order - 10000
|
||||
|
||||
super(ProjectMember, self).save(*args, **kwargs)
|
||||
|
||||
class Meta:
|
||||
unique_together = ["project", "member"]
|
||||
@@ -158,7 +189,7 @@ class ProjectIdentifier(AuditModel):
|
||||
project = models.OneToOneField(
|
||||
Project, on_delete=models.CASCADE, related_name="project_identifier"
|
||||
)
|
||||
name = models.CharField(max_length=10)
|
||||
name = models.CharField(max_length=12)
|
||||
|
||||
class Meta:
|
||||
unique_together = ["name", "workspace"]
|
||||
@@ -185,3 +216,41 @@ class ProjectFavorite(ProjectBaseModel):
|
||||
def __str__(self):
|
||||
"""Return user of the project"""
|
||||
return f"{self.user.email} <{self.project.name}>"
|
||||
|
||||
|
||||
def get_anchor():
|
||||
return uuid4().hex
|
||||
|
||||
|
||||
def get_default_views():
|
||||
return {
|
||||
"list": True,
|
||||
"kanban": True,
|
||||
"calendar": True,
|
||||
"gantt": True,
|
||||
"spreadsheet": True,
|
||||
}
|
||||
|
||||
|
||||
class ProjectDeployBoard(ProjectBaseModel):
|
||||
anchor = models.CharField(
|
||||
max_length=255, default=get_anchor, unique=True, db_index=True
|
||||
)
|
||||
comments = models.BooleanField(default=False)
|
||||
reactions = models.BooleanField(default=False)
|
||||
inbox = models.ForeignKey(
|
||||
"db.Inbox", related_name="bord_inbox", on_delete=models.SET_NULL, null=True
|
||||
)
|
||||
votes = models.BooleanField(default=False)
|
||||
views = models.JSONField(default=get_default_views)
|
||||
|
||||
class Meta:
|
||||
unique_together = ["project", "anchor"]
|
||||
verbose_name = "Project Deploy Board"
|
||||
verbose_name_plural = "Project Deploy Boards"
|
||||
db_table = "project_deploy_boards"
|
||||
ordering = ("-created_at",)
|
||||
|
||||
def __str__(self):
|
||||
"""Return project and anchor"""
|
||||
return f"{self.anchor} <{self.project.name}>"
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
# Python imports
|
||||
from enum import unique
|
||||
import uuid
|
||||
import string
|
||||
import random
|
||||
|
||||
# Django imports
|
||||
from django.db import models
|
||||
@@ -19,6 +20,15 @@ from slack_sdk import WebClient
|
||||
from slack_sdk.errors import SlackApiError
|
||||
|
||||
|
||||
def get_default_onboarding():
|
||||
return {
|
||||
"profile_complete": False,
|
||||
"workspace_create": False,
|
||||
"workspace_invite": False,
|
||||
"workspace_join": False,
|
||||
}
|
||||
|
||||
|
||||
class User(AbstractBaseUser, PermissionsMixin):
|
||||
id = models.UUIDField(
|
||||
default=uuid.uuid4, unique=True, editable=False, db_index=True, primary_key=True
|
||||
@@ -31,6 +41,7 @@ class User(AbstractBaseUser, PermissionsMixin):
|
||||
first_name = models.CharField(max_length=255, blank=True)
|
||||
last_name = models.CharField(max_length=255, blank=True)
|
||||
avatar = models.CharField(max_length=255, blank=True)
|
||||
cover_image = models.URLField(blank=True, null=True, max_length=800)
|
||||
|
||||
# tracking metrics
|
||||
date_joined = models.DateTimeField(auto_now_add=True, verbose_name="Created At")
|
||||
@@ -73,6 +84,9 @@ class User(AbstractBaseUser, PermissionsMixin):
|
||||
role = models.CharField(max_length=300, null=True, blank=True)
|
||||
is_bot = models.BooleanField(default=False)
|
||||
theme = models.JSONField(default=dict)
|
||||
display_name = models.CharField(max_length=255, default="")
|
||||
is_tour_completed = models.BooleanField(default=False)
|
||||
onboarding_step = models.JSONField(default=get_default_onboarding)
|
||||
|
||||
USERNAME_FIELD = "email"
|
||||
|
||||
@@ -97,6 +111,13 @@ class User(AbstractBaseUser, PermissionsMixin):
|
||||
self.token = uuid.uuid4().hex + uuid.uuid4().hex
|
||||
self.token_updated_at = timezone.now()
|
||||
|
||||
if not self.display_name:
|
||||
self.display_name = (
|
||||
self.email.split("@")[0]
|
||||
if len(self.email.split("@"))
|
||||
else "".join(random.choice(string.ascii_letters) for _ in range(6))
|
||||
)
|
||||
|
||||
if self.is_superuser:
|
||||
self.is_staff = True
|
||||
|
||||
|
||||
@@ -14,16 +14,41 @@ ROLE_CHOICES = (
|
||||
)
|
||||
|
||||
|
||||
def get_default_props():
|
||||
return {
|
||||
"filters": {"type": None},
|
||||
"groupByProperty": None,
|
||||
"issueView": "list",
|
||||
"orderBy": "-created_at",
|
||||
"properties": {
|
||||
"assignee": True,
|
||||
"due_date": True,
|
||||
"key": True,
|
||||
"labels": True,
|
||||
"priority": True,
|
||||
"state": True,
|
||||
"sub_issue_count": True,
|
||||
"attachment_count": True,
|
||||
"link": True,
|
||||
"estimate": True,
|
||||
"created_on": True,
|
||||
"updated_on": True,
|
||||
"start_date": True,
|
||||
},
|
||||
"showEmptyGroups": True,
|
||||
}
|
||||
|
||||
|
||||
class Workspace(BaseModel):
|
||||
name = models.CharField(max_length=255, verbose_name="Workspace Name")
|
||||
name = models.CharField(max_length=80, verbose_name="Workspace Name")
|
||||
logo = models.URLField(verbose_name="Logo", blank=True, null=True)
|
||||
owner = models.ForeignKey(
|
||||
settings.AUTH_USER_MODEL,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="owner_workspace",
|
||||
)
|
||||
slug = models.SlugField(max_length=100, db_index=True, unique=True)
|
||||
company_size = models.PositiveIntegerField(default=10)
|
||||
slug = models.SlugField(max_length=48, db_index=True, unique=True)
|
||||
organization_size = models.CharField(max_length=20)
|
||||
|
||||
def __str__(self):
|
||||
"""Return name of the Workspace"""
|
||||
@@ -47,7 +72,8 @@ class WorkspaceMember(BaseModel):
|
||||
)
|
||||
role = models.PositiveSmallIntegerField(choices=ROLE_CHOICES, default=10)
|
||||
company_role = models.TextField(null=True, blank=True)
|
||||
view_props = models.JSONField(null=True, blank=True)
|
||||
view_props = models.JSONField(default=get_default_props)
|
||||
default_props = models.JSONField(default=get_default_props)
|
||||
|
||||
class Meta:
|
||||
unique_together = ["workspace", "member"]
|
||||
|
||||
@@ -35,6 +35,7 @@ INSTALLED_APPS = [
|
||||
"rest_framework_simplejwt.token_blacklist",
|
||||
"corsheaders",
|
||||
"taggit",
|
||||
"django_celery_beat",
|
||||
]
|
||||
|
||||
MIDDLEWARE = [
|
||||
@@ -213,3 +214,4 @@ SIMPLE_JWT = {
|
||||
CELERY_TIMEZONE = TIME_ZONE
|
||||
CELERY_TASK_SERIALIZER = 'json'
|
||||
CELERY_ACCEPT_CONTENT = ['application/json']
|
||||
CELERY_IMPORTS = ("plane.bgtasks.issue_automation_task","plane.bgtasks.exporter_expired_task")
|
||||
|
||||
@@ -10,16 +10,14 @@ from sentry_sdk.integrations.redis import RedisIntegration
|
||||
|
||||
from .common import * # noqa
|
||||
|
||||
DEBUG = int(os.environ.get(
|
||||
"DEBUG", 1
|
||||
)) == 1
|
||||
DEBUG = int(os.environ.get("DEBUG", 1)) == 1
|
||||
|
||||
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
|
||||
|
||||
|
||||
DATABASES = {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.postgresql_psycopg2",
|
||||
"ENGINE": "django.db.backends.postgresql",
|
||||
"NAME": os.environ.get("PGUSER", "plane"),
|
||||
"USER": "",
|
||||
"PASSWORD": "",
|
||||
@@ -27,13 +25,11 @@ DATABASES = {
|
||||
}
|
||||
}
|
||||
|
||||
DOCKERIZED = int(os.environ.get(
|
||||
"DOCKERIZED", 0
|
||||
)) == 1
|
||||
DOCKERIZED = int(os.environ.get("DOCKERIZED", 0)) == 1
|
||||
|
||||
USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1
|
||||
|
||||
FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880))
|
||||
|
||||
if DOCKERIZED:
|
||||
DATABASES["default"] = dj_database_url.config()
|
||||
@@ -63,7 +59,29 @@ if os.environ.get("SENTRY_DSN", False):
|
||||
send_default_pii=True,
|
||||
environment="local",
|
||||
traces_sample_rate=0.7,
|
||||
profiles_sample_rate=1.0,
|
||||
)
|
||||
else:
|
||||
LOGGING = {
|
||||
"version": 1,
|
||||
"disable_existing_loggers": False,
|
||||
"handlers": {
|
||||
"console": {
|
||||
"class": "logging.StreamHandler",
|
||||
},
|
||||
},
|
||||
"root": {
|
||||
"handlers": ["console"],
|
||||
"level": "DEBUG",
|
||||
},
|
||||
"loggers": {
|
||||
"*": {
|
||||
"handlers": ["console"],
|
||||
"level": "DEBUG",
|
||||
"propagate": True,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
REDIS_HOST = "localhost"
|
||||
REDIS_PORT = 6379
|
||||
@@ -82,8 +100,9 @@ PROXY_BASE_URL = os.environ.get("PROXY_BASE_URL", False)
|
||||
ANALYTICS_SECRET_KEY = os.environ.get("ANALYTICS_SECRET_KEY", False)
|
||||
ANALYTICS_BASE_API = os.environ.get("ANALYTICS_BASE_API", False)
|
||||
|
||||
OPENAI_API_BASE = os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1")
|
||||
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", False)
|
||||
GPT_ENGINE = os.environ.get("GPT_ENGINE", "text-davinci-003")
|
||||
GPT_ENGINE = os.environ.get("GPT_ENGINE", "gpt-3.5-turbo")
|
||||
|
||||
SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN", False)
|
||||
|
||||
@@ -94,4 +113,4 @@ CELERY_BROKER_URL = os.environ.get("REDIS_URL")
|
||||
|
||||
GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False)
|
||||
|
||||
ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1"
|
||||
ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1"
|
||||
|
||||
@@ -13,13 +13,11 @@ from sentry_sdk.integrations.redis import RedisIntegration
|
||||
from .common import * # noqa
|
||||
|
||||
# Database
|
||||
DEBUG = int(os.environ.get(
|
||||
"DEBUG", 0
|
||||
)) == 1
|
||||
DEBUG = int(os.environ.get("DEBUG", 0)) == 1
|
||||
|
||||
DATABASES = {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.postgresql_psycopg2",
|
||||
"ENGINE": "django.db.backends.postgresql",
|
||||
"NAME": "plane",
|
||||
"USER": os.environ.get("PGUSER", ""),
|
||||
"PASSWORD": os.environ.get("PGPASSWORD", ""),
|
||||
@@ -72,8 +70,14 @@ CORS_ALLOW_HEADERS = [
|
||||
]
|
||||
|
||||
CORS_ALLOW_CREDENTIALS = True
|
||||
# Simplified static file serving.
|
||||
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
|
||||
|
||||
INSTALLED_APPS += ("scout_apm.django",)
|
||||
|
||||
STORAGES = {
|
||||
"staticfiles": {
|
||||
"BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
|
||||
},
|
||||
}
|
||||
|
||||
if bool(os.environ.get("SENTRY_DSN", False)):
|
||||
sentry_sdk.init(
|
||||
@@ -84,11 +88,12 @@ if bool(os.environ.get("SENTRY_DSN", False)):
|
||||
traces_sample_rate=1,
|
||||
send_default_pii=True,
|
||||
environment="production",
|
||||
profiles_sample_rate=1.0,
|
||||
)
|
||||
|
||||
if DOCKERIZED and USE_MINIO:
|
||||
INSTALLED_APPS += ("storages",)
|
||||
DEFAULT_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage"
|
||||
STORAGES["default"] = {"BACKEND": "storages.backends.s3boto3.S3Boto3Storage"}
|
||||
# The AWS access key to use.
|
||||
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key")
|
||||
# The AWS secret access key to use.
|
||||
@@ -96,7 +101,9 @@ if DOCKERIZED and USE_MINIO:
|
||||
# The name of the bucket to store files in.
|
||||
AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads")
|
||||
# The full URL to the S3 endpoint. Leave blank to use the default region URL.
|
||||
AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", "http://plane-minio:9000")
|
||||
AWS_S3_ENDPOINT_URL = os.environ.get(
|
||||
"AWS_S3_ENDPOINT_URL", "http://plane-minio:9000"
|
||||
)
|
||||
# Default permissions
|
||||
AWS_DEFAULT_ACL = "public-read"
|
||||
AWS_QUERYSTRING_AUTH = False
|
||||
@@ -187,7 +194,10 @@ else:
|
||||
# extra characters appended.
|
||||
AWS_S3_FILE_OVERWRITE = False
|
||||
|
||||
DEFAULT_FILE_STORAGE = "django_s3_storage.storage.S3Storage"
|
||||
STORAGES["default"] = {
|
||||
"BACKEND": "django_s3_storage.storage.S3Storage",
|
||||
}
|
||||
|
||||
# AWS Settings End
|
||||
|
||||
# Enable Connection Pooling (if desired)
|
||||
@@ -202,9 +212,6 @@ ALLOWED_HOSTS = [
|
||||
]
|
||||
|
||||
|
||||
# Simplified static file serving.
|
||||
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
|
||||
|
||||
SESSION_COOKIE_SECURE = True
|
||||
CSRF_COOKIE_SECURE = True
|
||||
|
||||
@@ -241,8 +248,9 @@ PROXY_BASE_URL = os.environ.get("PROXY_BASE_URL", False)
|
||||
ANALYTICS_SECRET_KEY = os.environ.get("ANALYTICS_SECRET_KEY", False)
|
||||
ANALYTICS_BASE_API = os.environ.get("ANALYTICS_BASE_API", False)
|
||||
|
||||
OPENAI_API_BASE = os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1")
|
||||
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", False)
|
||||
GPT_ENGINE = os.environ.get("GPT_ENGINE", "text-davinci-003")
|
||||
GPT_ENGINE = os.environ.get("GPT_ENGINE", "gpt-3.5-turbo")
|
||||
|
||||
SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN", False)
|
||||
|
||||
@@ -264,3 +272,8 @@ GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False)
|
||||
|
||||
|
||||
ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1"
|
||||
|
||||
# Scout Settings
|
||||
SCOUT_MONITOR = os.environ.get("SCOUT_MONITOR", False)
|
||||
SCOUT_KEY = os.environ.get("SCOUT_KEY", "")
|
||||
SCOUT_NAME = "Plane"
|
||||
|
||||
@@ -11,13 +11,12 @@ from sentry_sdk.integrations.django import DjangoIntegration
|
||||
from sentry_sdk.integrations.redis import RedisIntegration
|
||||
|
||||
from .common import * # noqa
|
||||
|
||||
# Database
|
||||
DEBUG = int(os.environ.get(
|
||||
"DEBUG", 1
|
||||
)) == 1
|
||||
DEBUG = int(os.environ.get("DEBUG", 1)) == 1
|
||||
DATABASES = {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.postgresql_psycopg2",
|
||||
"ENGINE": "django.db.backends.postgresql",
|
||||
"NAME": os.environ.get("PGUSER", "plane"),
|
||||
"USER": "",
|
||||
"PASSWORD": "",
|
||||
@@ -48,13 +47,15 @@ ALLOWED_HOSTS = ["*"]
|
||||
# TODO: Make it FALSE and LIST DOMAINS IN FULL PROD.
|
||||
CORS_ALLOW_ALL_ORIGINS = True
|
||||
|
||||
# Simplified static file serving.
|
||||
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
|
||||
STORAGES = {
|
||||
"staticfiles": {
|
||||
"BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
# Make true if running in a docker environment
|
||||
DOCKERIZED = int(os.environ.get(
|
||||
"DOCKERIZED", 0
|
||||
)) == 1
|
||||
DOCKERIZED = int(os.environ.get("DOCKERIZED", 0)) == 1
|
||||
FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880))
|
||||
USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1
|
||||
|
||||
@@ -66,6 +67,7 @@ sentry_sdk.init(
|
||||
traces_sample_rate=1,
|
||||
send_default_pii=True,
|
||||
environment="staging",
|
||||
profiles_sample_rate=1.0,
|
||||
)
|
||||
|
||||
# The AWS region to connect to.
|
||||
@@ -150,7 +152,9 @@ AWS_S3_SIGNATURE_VERSION = None
|
||||
AWS_S3_FILE_OVERWRITE = False
|
||||
|
||||
# AWS Settings End
|
||||
|
||||
STORAGES["default"] = {
|
||||
"BACKEND": "django_s3_storage.storage.S3Storage",
|
||||
}
|
||||
|
||||
# Enable Connection Pooling (if desired)
|
||||
# DATABASES['default']['ENGINE'] = 'django_postgrespool'
|
||||
@@ -163,11 +167,6 @@ ALLOWED_HOSTS = [
|
||||
"*",
|
||||
]
|
||||
|
||||
|
||||
DEFAULT_FILE_STORAGE = "django_s3_storage.storage.S3Storage"
|
||||
# Simplified static file serving.
|
||||
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
|
||||
|
||||
SESSION_COOKIE_SECURE = True
|
||||
CSRF_COOKIE_SECURE = True
|
||||
|
||||
@@ -199,15 +198,19 @@ PROXY_BASE_URL = os.environ.get("PROXY_BASE_URL", False)
|
||||
ANALYTICS_SECRET_KEY = os.environ.get("ANALYTICS_SECRET_KEY", False)
|
||||
ANALYTICS_BASE_API = os.environ.get("ANALYTICS_BASE_API", False)
|
||||
|
||||
|
||||
OPENAI_API_BASE = os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1")
|
||||
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", False)
|
||||
GPT_ENGINE = os.environ.get("GPT_ENGINE", "text-davinci-003")
|
||||
GPT_ENGINE = os.environ.get("GPT_ENGINE", "gpt-3.5-turbo")
|
||||
|
||||
SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN", False)
|
||||
|
||||
LOGGER_BASE_URL = os.environ.get("LOGGER_BASE_URL", False)
|
||||
|
||||
redis_url = os.environ.get("REDIS_URL")
|
||||
broker_url = f"{redis_url}?ssl_cert_reqs={ssl.CERT_NONE.name}&ssl_ca_certs={certifi.where()}"
|
||||
broker_url = (
|
||||
f"{redis_url}?ssl_cert_reqs={ssl.CERT_NONE.name}&ssl_ca_certs={certifi.where()}"
|
||||
)
|
||||
|
||||
CELERY_RESULT_BACKEND = broker_url
|
||||
CELERY_BROKER_URL = broker_url
|
||||
|
||||
@@ -3,11 +3,10 @@
|
||||
"""
|
||||
|
||||
# from django.contrib import admin
|
||||
from django.urls import path
|
||||
from django.urls import path, include, re_path
|
||||
from django.views.generic import TemplateView
|
||||
|
||||
from django.conf import settings
|
||||
from django.conf.urls import include, url, static
|
||||
|
||||
# from django.conf.urls.static import static
|
||||
|
||||
@@ -18,11 +17,10 @@ urlpatterns = [
|
||||
path("", include("plane.web.urls")),
|
||||
]
|
||||
|
||||
urlpatterns = urlpatterns + static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
||||
|
||||
if settings.DEBUG:
|
||||
import debug_toolbar
|
||||
|
||||
urlpatterns = [
|
||||
url(r"^__debug__/", include(debug_toolbar.urls)),
|
||||
re_path(r"^__debug__/", include(debug_toolbar.urls)),
|
||||
] + urlpatterns
|
||||
|
||||
@@ -12,6 +12,18 @@ def filter_state(params, filter, method):
|
||||
return filter
|
||||
|
||||
|
||||
def filter_state_group(params, filter, method):
|
||||
if method == "GET":
|
||||
state_group = params.get("state_group").split(",")
|
||||
if len(state_group) and "" not in state_group:
|
||||
filter["state__group__in"] = state_group
|
||||
else:
|
||||
if params.get("state_group", None) and len(params.get("state_group")):
|
||||
filter["state__group__in"] = params.get("state_group")
|
||||
return filter
|
||||
|
||||
|
||||
|
||||
def filter_estimate_point(params, filter, method):
|
||||
if method == "GET":
|
||||
estimate_points = params.get("estimate_point").split(",")
|
||||
@@ -112,10 +124,11 @@ def filter_created_at(params, filter, method):
|
||||
else:
|
||||
if params.get("created_at", None) and len(params.get("created_at")):
|
||||
for query in params.get("created_at"):
|
||||
if query.get("timeline", "after") == "after":
|
||||
filter["created_at__date__gte"] = query.get("datetime")
|
||||
created_at_query = query.split(";")
|
||||
if len(created_at_query) == 2 and "after" in created_at_query:
|
||||
filter["created_at__date__gte"] = created_at_query[0]
|
||||
else:
|
||||
filter["created_at__date__lte"] = query.get("datetime")
|
||||
filter["created_at__date__lte"] = created_at_query[0]
|
||||
return filter
|
||||
|
||||
|
||||
@@ -132,10 +145,11 @@ def filter_updated_at(params, filter, method):
|
||||
else:
|
||||
if params.get("updated_at", None) and len(params.get("updated_at")):
|
||||
for query in params.get("updated_at"):
|
||||
if query.get("timeline", "after") == "after":
|
||||
filter["updated_at__date__gte"] = query.get("datetime")
|
||||
updated_at_query = query.split(";")
|
||||
if len(updated_at_query) == 2 and "after" in updated_at_query:
|
||||
filter["updated_at__date__gte"] = updated_at_query[0]
|
||||
else:
|
||||
filter["updated_at__date__lte"] = query.get("datetime")
|
||||
filter["updated_at__date__lte"] = updated_at_query[0]
|
||||
return filter
|
||||
|
||||
|
||||
@@ -152,10 +166,11 @@ def filter_start_date(params, filter, method):
|
||||
else:
|
||||
if params.get("start_date", None) and len(params.get("start_date")):
|
||||
for query in params.get("start_date"):
|
||||
if query.get("timeline", "after") == "after":
|
||||
filter["start_date__gte"] = query.get("datetime")
|
||||
start_date_query = query.split(";")
|
||||
if len(start_date_query) == 2 and "after" in start_date_query:
|
||||
filter["start_date__gte"] = start_date_query[0]
|
||||
else:
|
||||
filter["start_date__lte"] = query.get("datetime")
|
||||
filter["start_date__lte"] = start_date_query[0]
|
||||
return filter
|
||||
|
||||
|
||||
@@ -166,16 +181,17 @@ def filter_target_date(params, filter, method):
|
||||
for query in target_dates:
|
||||
target_date_query = query.split(";")
|
||||
if len(target_date_query) == 2 and "after" in target_date_query:
|
||||
filter["target_date__gte"] = target_date_query[0]
|
||||
filter["target_date__gt"] = target_date_query[0]
|
||||
else:
|
||||
filter["target_date__lte"] = target_date_query[0]
|
||||
filter["target_date__lt"] = target_date_query[0]
|
||||
else:
|
||||
if params.get("target_date", None) and len(params.get("target_date")):
|
||||
for query in params.get("target_date"):
|
||||
if query.get("timeline", "after") == "after":
|
||||
filter["target_date__gte"] = query.get("datetime")
|
||||
target_date_query = query.split(";")
|
||||
if len(target_date_query) == 2 and "after" in target_date_query:
|
||||
filter["target_date__gt"] = target_date_query[0]
|
||||
else:
|
||||
filter["target_date__lte"] = query.get("datetime")
|
||||
filter["target_date__lt"] = target_date_query[0]
|
||||
|
||||
return filter
|
||||
|
||||
@@ -193,10 +209,11 @@ def filter_completed_at(params, filter, method):
|
||||
else:
|
||||
if params.get("completed_at", None) and len(params.get("completed_at")):
|
||||
for query in params.get("completed_at"):
|
||||
if query.get("timeline", "after") == "after":
|
||||
filter["completed_at__date__gte"] = query.get("datetime")
|
||||
completed_at_query = query.split(";")
|
||||
if len(completed_at_query) == 2 and "after" in completed_at_query:
|
||||
filter["completed_at__date__gte"] = completed_at_query[0]
|
||||
else:
|
||||
filter["completed_at__lte"] = query.get("datetime")
|
||||
filter["completed_at__lte"] = completed_at_query[0]
|
||||
return filter
|
||||
|
||||
|
||||
@@ -212,6 +229,7 @@ def filter_issue_state_type(params, filter, method):
|
||||
return filter
|
||||
|
||||
|
||||
|
||||
def filter_project(params, filter, method):
|
||||
if method == "GET":
|
||||
projects = params.get("project").split(",")
|
||||
@@ -268,11 +286,31 @@ def filter_sub_issue_toggle(params, filter, method):
|
||||
return filter
|
||||
|
||||
|
||||
def filter_subscribed_issues(params, filter, method):
|
||||
if method == "GET":
|
||||
subscribers = params.get("subscriber").split(",")
|
||||
if len(subscribers) and "" not in subscribers:
|
||||
filter["issue_subscribers__subscriber_id__in"] = subscribers
|
||||
else:
|
||||
if params.get("subscriber", None) and len(params.get("subscriber")):
|
||||
filter["issue_subscribers__subscriber_id__in"] = params.get("subscriber")
|
||||
return filter
|
||||
|
||||
|
||||
def filter_start_target_date_issues(params, filter, method):
|
||||
start_target_date = params.get("start_target_date", "false")
|
||||
if start_target_date == "true":
|
||||
filter["target_date__isnull"] = False
|
||||
filter["start_date__isnull"] = False
|
||||
return filter
|
||||
|
||||
|
||||
def issue_filters(query_params, method):
|
||||
filter = dict()
|
||||
|
||||
ISSUE_FILTER = {
|
||||
"state": filter_state,
|
||||
"state_group": filter_state_group,
|
||||
"estimate_point": filter_estimate_point,
|
||||
"priority": filter_priority,
|
||||
"parent": filter_parent,
|
||||
@@ -291,6 +329,8 @@ def issue_filters(query_params, method):
|
||||
"module": filter_module,
|
||||
"inbox_status": filter_inbox_status,
|
||||
"sub_issue": filter_sub_issue_toggle,
|
||||
"subscriber": filter_subscribed_issues,
|
||||
"start_target_date": filter_start_target_date_issues,
|
||||
}
|
||||
|
||||
for key, value in ISSUE_FILTER.items():
|
||||
|
||||
@@ -1,31 +1,36 @@
|
||||
# base requirements
|
||||
|
||||
Django==3.2.19
|
||||
Django==4.2.3
|
||||
django-braces==1.15.0
|
||||
django-taggit==3.1.0
|
||||
psycopg2==2.9.5
|
||||
django-oauth-toolkit==2.2.0
|
||||
mistune==2.0.4
|
||||
django-taggit==4.0.0
|
||||
psycopg==3.1.9
|
||||
django-oauth-toolkit==2.3.0
|
||||
mistune==3.0.1
|
||||
djangorestframework==3.14.0
|
||||
redis==4.5.4
|
||||
redis==4.6.0
|
||||
django-nested-admin==4.0.2
|
||||
django-cors-headers==3.13.0
|
||||
whitenoise==6.3.0
|
||||
django-allauth==0.52.0
|
||||
faker==13.4.0
|
||||
django-filter==22.1
|
||||
django-cors-headers==4.1.0
|
||||
whitenoise==6.5.0
|
||||
django-allauth==0.54.0
|
||||
faker==18.11.2
|
||||
django-filter==23.2
|
||||
jsonmodels==2.6.0
|
||||
djangorestframework-simplejwt==5.2.2
|
||||
sentry-sdk==1.14.0
|
||||
django-s3-storage==0.13.11
|
||||
sentry-sdk==1.27.0
|
||||
django-s3-storage==0.14.0
|
||||
django-crum==0.7.9
|
||||
django-guardian==2.4.0
|
||||
dj_rest_auth==2.2.5
|
||||
google-auth==2.16.0
|
||||
google-api-python-client==2.75.0
|
||||
django-redis==5.2.0
|
||||
uvicorn==0.20.0
|
||||
google-auth==2.21.0
|
||||
google-api-python-client==2.92.0
|
||||
django-redis==5.3.0
|
||||
uvicorn==0.22.0
|
||||
channels==4.0.0
|
||||
openai==0.27.2
|
||||
slack-sdk==3.20.2
|
||||
celery==5.2.7
|
||||
openai==0.27.8
|
||||
slack-sdk==3.21.3
|
||||
celery==5.3.1
|
||||
django_celery_beat==2.5.0
|
||||
psycopg-binary==3.1.9
|
||||
psycopg-c==3.1.9
|
||||
scout-apm==2.26.1
|
||||
openpyxl==3.1.2
|
||||
@@ -1,3 +1,3 @@
|
||||
-r base.txt
|
||||
|
||||
django-debug-toolbar==3.8.1
|
||||
django-debug-toolbar==4.1.0
|
||||
@@ -1,12 +1,11 @@
|
||||
-r base.txt
|
||||
|
||||
dj-database-url==1.2.0
|
||||
dj-database-url==2.0.0
|
||||
gunicorn==20.1.0
|
||||
whitenoise==6.3.0
|
||||
whitenoise==6.5.0
|
||||
django-storages==1.13.2
|
||||
boto3==1.26.136
|
||||
django-anymail==9.0
|
||||
twilio==7.16.2
|
||||
django-debug-toolbar==3.8.1
|
||||
gevent==22.10.2
|
||||
boto3==1.27.0
|
||||
django-anymail==10.0
|
||||
django-debug-toolbar==4.1.0
|
||||
gevent==23.7.0
|
||||
psycogreen==1.0.2
|
||||
@@ -89,8 +89,8 @@
|
||||
</tr>
|
||||
<tr>
|
||||
<td height="18" align="center" valign="top" class="r15-i nl2go-default-textstyle" style="color: #3b3f44; font-family: arial,helvetica,sans-serif; font-size: 16px; line-height: 1.5;">
|
||||
<!--[if mso]>
|
||||
<v:roundrect xmlns:v="urn:schemas-microsoft-com:vml" xmlns:w="urn:schemas-microsoft-com:office:word" href="https://app.plane.so/" style="v-text-anchor:middle; height: 33px; width: 301px;" arcsize="12%" fillcolor="#ffffff" strokecolor="#3f76ff" strokeweight="1px" data-btn="1">
|
||||
<!--[if mso]>
|
||||
<v:roundrect xmlns:v="urn:schemas-microsoft-com:vml" xmlns:w="urn:schemas-microsoft-com:office:word" href={{magic_url}} style="v-text-anchor:middle; height: 33px; width: 301px;" arcsize="12%" fillcolor="#ffffff" strokecolor="#3f76ff" strokeweight="1px" data-btn="1">
|
||||
<w:anchorlock/>
|
||||
<div style="display:none;">
|
||||
<center class="default-button">
|
||||
@@ -98,11 +98,11 @@
|
||||
</center>
|
||||
</div>
|
||||
</v:roundrect>
|
||||
<![endif]--> <!--[if !mso]><!-- -->
|
||||
<![endif]--> <!--[if !mso]><!-- -->
|
||||
<a href={{magic_url}} class="r16-r default-button" target="_blank" data-btn="1" style="font-style: normal; font-weight: bold; line-height: 1.15; text-decoration: none; border-style: solid; word-wrap: break-word; display: inline-block; -webkit-text-size-adjust: none; mso-hide: all; background-color: #ffffff; border-color: #3f76ff; border-radius: 4px; border-width: 1px; color: #ffffff; font-family: arial,helvetica,sans-serif; font-size: 16px; height: 18px; padding-bottom: 7px; padding-left: 20px; padding-right: 20px; padding-top: 7px; width: 258px;">
|
||||
<p style="margin: 0;"><span style="color: #3F76FF; font-size: 14px;">Open Plane</span></p>
|
||||
</a>
|
||||
<!--<![endif]-->
|
||||
<!--<![endif]-->
|
||||
</td>
|
||||
</tr>
|
||||
<tr class="nl2go-responsive-hide">
|
||||
@@ -364,4 +364,4 @@
|
||||
</tr>
|
||||
</table>
|
||||
</body>
|
||||
</html>
|
||||
</html>
|
||||
|
||||
apiserver/templates/emails/exports/issues.html (new file, 9 lines)
@@ -0,0 +1,9 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html>
Dear {{username}},<br/>
Your requested issue data has been successfully exported from Plane. The export includes all relevant information about the issues you requested from your selected projects.<br/>
Please find the attachment and download the CSV file. If you have any questions or need further assistance, please don't hesitate to contact our support team at <a href="mailto:engineering@plane.so">engineering@plane.so</a>. We're here to help!<br/>
Thank you for using Plane. We hope this export will aid you in effectively managing your projects.<br/>
Regards,<br/>
Team Plane
</html>
@@ -90,8 +90,8 @@
|
||||
</tr>
|
||||
<tr>
|
||||
<td height="18" align="center" valign="top" class="r15-i nl2go-default-textstyle" style="color: #3b3f44; font-family: arial,helvetica,sans-serif; font-size: 16px; line-height: 1.5;">
|
||||
<!--[if mso]>
|
||||
<v:roundrect xmlns:v="urn:schemas-microsoft-com:vml" xmlns:w="urn:schemas-microsoft-com:office:word" href="https://app.plane.so/" style="v-text-anchor:middle; height: 33px; width: 301px;" arcsize="12%" fillcolor="#ffffff" strokecolor="#3f76ff" strokeweight="1px" data-btn="1">
|
||||
<!--[if mso]>
|
||||
<v:roundrect xmlns:v="urn:schemas-microsoft-com:vml" xmlns:w="urn:schemas-microsoft-com:office:word" href={{invitation_url}} style="v-text-anchor:middle; height: 33px; width: 301px;" arcsize="12%" fillcolor="#ffffff" strokecolor="#3f76ff" strokeweight="1px" data-btn="1">
|
||||
<w:anchorlock/>
|
||||
<div style="display:none;">
|
||||
<center class="default-button">
|
||||
@@ -99,11 +99,11 @@
|
||||
</center>
|
||||
</div>
|
||||
</v:roundrect>
|
||||
<![endif]--> <!--[if !mso]><!-- -->
|
||||
<![endif]--> <!--[if !mso]><!-- -->
|
||||
<a href={{invitation_url}} class="r16-r default-button" target="_blank" data-btn="1" style="font-style: normal; font-weight: bold; line-height: 1.15; text-decoration: none; border-style: solid; word-wrap: break-word; display: inline-block; -webkit-text-size-adjust: none; mso-hide: all; background-color: #ffffff; border-color: #3f76ff; border-radius: 4px; border-width: 1px; color: #ffffff; font-family: arial,helvetica,sans-serif; font-size: 16px; height: 18px; padding-bottom: 7px; padding-left: 20px; padding-right: 20px; padding-top: 7px; width: 258px;">
|
||||
<p style="margin: 0;"><span style="color: #3F76FF;">Accept the invite</span></p>
|
||||
</a>
|
||||
<!--<![endif]-->
|
||||
<!--<![endif]-->
|
||||
</td>
|
||||
</tr>
|
||||
<tr class="nl2go-responsive-hide">
|
||||
@@ -346,4 +346,4 @@
|
||||
</tr>
|
||||
</table>
|
||||
</body>
|
||||
</html>
|
||||
</html>
|
||||
|
||||
@@ -90,8 +90,8 @@
|
||||
</tr>
|
||||
<tr>
|
||||
<td height="18" align="center" valign="top" class="r15-i nl2go-default-textstyle" style="color: #3b3f44; font-family: arial,helvetica,sans-serif; font-size: 16px; line-height: 1.5;">
|
||||
<!--[if mso]>
|
||||
<v:roundrect xmlns:v="urn:schemas-microsoft-com:vml" xmlns:w="urn:schemas-microsoft-com:office:word" href="https://app.plane.so/" style="v-text-anchor:middle; height: 33px; width: 301px;" arcsize="12%" fillcolor="#ffffff" strokecolor="#3f76ff" strokeweight="1px" data-btn="1">
|
||||
<!--[if mso]>
|
||||
<v:roundrect xmlns:v="urn:schemas-microsoft-com:vml" xmlns:w="urn:schemas-microsoft-com:office:word" href={{invitation_url}} style="v-text-anchor:middle; height: 33px; width: 301px;" arcsize="12%" fillcolor="#ffffff" strokecolor="#3f76ff" strokeweight="1px" data-btn="1">
|
||||
<w:anchorlock/>
|
||||
<div style="display:none;">
|
||||
<center class="default-button">
|
||||
@@ -99,11 +99,11 @@
|
||||
</center>
|
||||
</div>
|
||||
</v:roundrect>
|
||||
<![endif]--> <!--[if !mso]><!-- -->
|
||||
<![endif]--> <!--[if !mso]><!-- -->
|
||||
<a href={{invitation_url}} class="r16-r default-button" target="_blank" data-btn="1" style="font-style: normal; font-weight: bold; line-height: 1.15; text-decoration: none; border-style: solid; word-wrap: break-word; display: inline-block; -webkit-text-size-adjust: none; mso-hide: all; background-color: #ffffff; border-color: #3f76ff; border-radius: 4px; border-width: 1px; color: #ffffff; font-family: arial,helvetica,sans-serif; font-size: 16px; height: 18px; padding-bottom: 7px; padding-left: 20px; padding-right: 20px; padding-top: 7px; width: 258px;">
|
||||
<p style="margin: 0;"><span style="color: #3F76FF;">Accept the invite</span></p>
|
||||
</a>
|
||||
<!--<![endif]-->
|
||||
<!--<![endif]-->
|
||||
</td>
|
||||
</tr>
|
||||
<tr class="nl2go-responsive-hide">
|
||||
@@ -346,4 +346,4 @@
|
||||
</tr>
|
||||
</table>
|
||||
</body>
|
||||
</html>
|
||||
</html>
|
||||
|
||||
@@ -20,7 +20,7 @@ ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000
|
||||
COPY .gitignore .gitignore
|
||||
COPY --from=builder /app/out/json/ .
|
||||
COPY --from=builder /app/out/yarn.lock ./yarn.lock
|
||||
RUN yarn install
|
||||
RUN yarn install --network-timeout 500000
|
||||
|
||||
# Build the project
|
||||
COPY --from=builder /app/out/full/ .
|
||||
@@ -33,8 +33,8 @@ RUN yarn turbo run build --filter=app
|
||||
|
||||
ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
|
||||
BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
|
||||
|
||||
RUN /usr/local/bin/replace-env-vars.sh http://NEXT_PUBLIC_WEBAPP_URL_PLACEHOLDER ${NEXT_PUBLIC_API_BASE_URL}
|
||||
ENV NEXT_PUBLIC_DEPLOY_URL=${NEXT_PUBLIC_DEPLOY_URL}
|
||||
RUN /usr/local/bin/replace-env-vars.sh http://NEXT_PUBLIC_WEBAPP_URL_PLACEHOLDER ${NEXT_PUBLIC_API_BASE_URL} app
|
||||
|
||||
FROM node:18-alpine AS runner
|
||||
WORKDIR /app
|
||||
|
||||
@@ -32,6 +32,7 @@ export const EmailCodeForm = ({ handleSignIn }: any) => {
|
||||
setError,
|
||||
setValue,
|
||||
getValues,
|
||||
watch,
|
||||
formState: { errors, isSubmitting, isValid, isDirty },
|
||||
} = useForm<EmailCodeFormValues>({
|
||||
defaultValues: {
|
||||
@@ -112,43 +113,35 @@ export const EmailCodeForm = ({ handleSignIn }: any) => {
|
||||
|
||||
return (
|
||||
<>
|
||||
<form className="space-y-5 py-5 px-5">
|
||||
{(codeSent || codeResent) && (
|
||||
<div className="rounded-md bg-green-500/20 p-4">
|
||||
<div className="flex">
|
||||
<div className="flex-shrink-0">
|
||||
<CheckCircleIcon className="h-5 w-5 text-green-500" aria-hidden="true" />
|
||||
</div>
|
||||
<div className="ml-3">
|
||||
<p className="text-sm font-medium text-green-500">
|
||||
{codeResent
|
||||
? "Please check your mail for new code."
|
||||
: "Please check your mail for code."}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
<div>
|
||||
{(codeSent || codeResent) && (
|
||||
<p className="text-center mt-4">
|
||||
We have sent the sign in code.
|
||||
<br />
|
||||
Please check your inbox at <span className="font-medium">{watch("email")}</span>
|
||||
</p>
|
||||
)}
|
||||
<form className="space-y-4 mt-10 sm:w-[360px] mx-auto">
|
||||
<div className="space-y-1">
|
||||
<Input
|
||||
id="email"
|
||||
type="email"
|
||||
name="email"
|
||||
register={register}
|
||||
validations={{
|
||||
required: "Email ID is required",
|
||||
required: "Email address is required",
|
||||
validate: (value) =>
|
||||
/^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/.test(
|
||||
value
|
||||
) || "Email ID is not valid",
|
||||
) || "Email address is not valid",
|
||||
}}
|
||||
error={errors.email}
|
||||
placeholder="Enter your Email ID"
|
||||
placeholder="Enter your email address..."
|
||||
className="border-custom-border-300 h-[46px]"
|
||||
/>
|
||||
</div>
|
||||
|
||||
{codeSent && (
|
||||
<div>
|
||||
<>
|
||||
<Input
|
||||
id="token"
|
||||
type="token"
|
||||
@@ -158,14 +151,15 @@ export const EmailCodeForm = ({ handleSignIn }: any) => {
|
||||
required: "Code is required",
|
||||
}}
|
||||
error={errors.token}
|
||||
placeholder="Enter code"
|
||||
placeholder="Enter code..."
|
||||
className="border-custom-border-300 h-[46px]"
|
||||
/>
|
||||
<button
|
||||
type="button"
|
||||
className={`mt-5 flex w-full justify-end text-xs outline-none ${
|
||||
className={`flex w-full justify-end text-xs outline-none ${
|
||||
isResendDisabled
|
||||
? "cursor-default text-brand-secondary"
|
||||
: "cursor-pointer text-brand-accent"
|
||||
? "cursor-default text-custom-text-200"
|
||||
: "cursor-pointer text-custom-primary-100"
|
||||
} `}
|
||||
onClick={() => {
|
||||
setIsCodeResending(true);
|
||||
@@ -178,46 +172,43 @@ export const EmailCodeForm = ({ handleSignIn }: any) => {
|
||||
disabled={isResendDisabled}
|
||||
>
|
||||
{resendCodeTimer > 0 ? (
|
||||
<p className="text-right">
|
||||
Didn{"'"}t receive code? Get new code in {resendCodeTimer} seconds.
|
||||
</p>
|
||||
<span className="text-right">Request new code in {resendCodeTimer} seconds</span>
|
||||
) : isCodeResending ? (
|
||||
"Sending code..."
|
||||
"Sending new code..."
|
||||
) : errorResendingCode ? (
|
||||
"Please try again later"
|
||||
) : (
|
||||
"Resend code"
|
||||
<span className="font-medium">Resend code</span>
|
||||
)}
|
||||
</button>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
{codeSent ? (
|
||||
<PrimaryButton
|
||||
type="submit"
|
||||
className="w-full text-center h-[46px]"
|
||||
size="md"
|
||||
onClick={handleSubmit(handleSignin)}
|
||||
disabled={!isValid && isDirty}
|
||||
loading={isLoading}
|
||||
>
|
||||
{isLoading ? "Signing in..." : "Sign in"}
|
||||
</PrimaryButton>
|
||||
) : (
|
||||
<PrimaryButton
|
||||
className="w-full text-center h-[46px]"
|
||||
size="md"
|
||||
onClick={() => {
|
||||
handleSubmit(onSubmit)().then(() => {
|
||||
setResendCodeTimer(30);
|
||||
});
|
||||
}}
|
||||
disabled={!isValid && isDirty}
|
||||
loading={isSubmitting}
|
||||
>
|
||||
{isSubmitting ? "Sending code..." : "Send sign in code"}
|
||||
</PrimaryButton>
|
||||
)}
|
||||
<div>
|
||||
{codeSent ? (
|
||||
<PrimaryButton
|
||||
type="submit"
|
||||
className="w-full text-center"
|
||||
size="md"
|
||||
onClick={handleSubmit(handleSignin)}
|
||||
disabled={!isValid && isDirty}
|
||||
loading={isLoading}
|
||||
>
|
||||
{isLoading ? "Signing in..." : "Sign in"}
|
||||
</PrimaryButton>
|
||||
) : (
|
||||
<PrimaryButton
|
||||
className="w-full text-center"
|
||||
size="md"
|
||||
onClick={() => {
|
||||
handleSubmit(onSubmit)().then(() => {
|
||||
setResendCodeTimer(30);
|
||||
});
|
||||
}}
|
||||
loading={isSubmitting || (!isValid && isDirty)}
|
||||
>
|
||||
{isSubmitting ? "Sending code..." : "Send code"}
|
||||
</PrimaryButton>
|
||||
)}
|
||||
</div>
|
||||
</form>
|
||||
</>
|
||||
);
|
||||
|
||||
@@ -8,7 +8,7 @@ import { useForm } from "react-hook-form";
// components
import { EmailResetPasswordForm } from "components/account";
// ui
import { Input, SecondaryButton } from "components/ui";
import { Input, PrimaryButton } from "components/ui";
// types
type EmailPasswordFormValues = {
email: string;
@@ -42,28 +42,39 @@ export const EmailPasswordForm: React.FC<Props> = ({ onSubmit }) => {

return (
<>
<h1 className="text-center text-2xl sm:text-2.5xl font-semibold text-custom-text-100">
{isResettingPassword
? "Reset your password"
: isSignUpPage
? "Sign up on Plane"
: "Sign in to Plane"}
</h1>
{isResettingPassword ? (
<EmailResetPasswordForm setIsResettingPassword={setIsResettingPassword} />
) : (
<form className="mt-5 py-5 px-5" onSubmit={handleSubmit(onSubmit)}>
<div>
<form
className="space-y-4 mt-10 w-full sm:w-[360px] mx-auto"
onSubmit={handleSubmit(onSubmit)}
>
<div className="space-y-1">
<Input
id="email"
type="email"
name="email"
register={register}
validations={{
required: "Email ID is required",
required: "Email address is required",
validate: (value) =>
/^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/.test(
value
) || "Email ID is not valid",
) || "Email address is not valid",
}}
error={errors.email}
placeholder="Enter your email ID"
placeholder="Enter your email address..."
className="border-custom-border-300 h-[46px]"
/>
</div>
<div className="mt-5">
<div className="space-y-1">
<Input
id="password"
type="password"
@@ -73,46 +84,45 @@ export const EmailPasswordForm: React.FC<Props> = ({ onSubmit }) => {
required: "Password is required",
}}
error={errors.password}
placeholder="Enter your password"
placeholder="Enter your password..."
className="border-custom-border-300 h-[46px]"
/>
</div>
<div className="mt-2 flex items-center justify-between">
<div className="ml-auto text-sm">
{isSignUpPage ? (
<Link href="/">
<a className="font-medium text-brand-accent hover:text-brand-accent">
Already have an account? Sign in.
</a>
</Link>
) : (
<button
type="button"
onClick={() => setIsResettingPassword(true)}
className="font-medium text-brand-accent hover:text-brand-accent"
>
Forgot your password?
</button>
)}
</div>
<div className="text-right text-xs">
{isSignUpPage ? (
<Link href="/">
<a className="text-custom-text-200 hover:text-custom-primary-100">
Already have an account? Sign in.
</a>
</Link>
) : (
<button
type="button"
onClick={() => setIsResettingPassword(true)}
className="text-custom-text-200 hover:text-custom-primary-100"
>
Forgot your password?
</button>
)}
</div>
<div className="mt-5">
<SecondaryButton
<div>
<PrimaryButton
type="submit"
className="w-full text-center"
className="w-full text-center h-[46px]"
disabled={!isValid && isDirty}
loading={isSubmitting}
>
{isSignUpPage
? isSubmitting
? "Signing up..."
: "Sign Up"
: "Sign up"
: isSubmitting
? "Signing in..."
: "Sign In"}
</SecondaryButton>
: "Sign in"}
</PrimaryButton>
{!isSignUpPage && (
<Link href="/sign-up">
<a className="block font-medium text-brand-accent hover:text-brand-accent text-sm mt-1">
<a className="block text-custom-text-200 hover:text-custom-primary-100 text-xs mt-4">
Don{"'"}t have an account? Sign up.
</a>
</Link>
@@ -59,32 +59,36 @@ export const EmailResetPasswordForm: React.FC<Props> = ({ setIsResettingPassword
};

return (
<form className="mt-5 py-5 px-5" onSubmit={handleSubmit(forgotPassword)}>
<div>
<form
className="space-y-4 mt-10 w-full sm:w-[360px] mx-auto"
onSubmit={handleSubmit(forgotPassword)}
>
<div className="space-y-1">
<Input
id="email"
type="email"
name="email"
register={register}
validations={{
required: "Email ID is required",
required: "Email address is required",
validate: (value) =>
/^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/.test(
value
) || "Email ID is not valid",
) || "Email address is not valid",
}}
error={errors.email}
placeholder="Enter registered Email ID"
placeholder="Enter registered email address.."
className="border-custom-border-300 h-[46px]"
/>
</div>
<div className="mt-5 flex items-center gap-2">
<div className="mt-5 flex flex-col-reverse sm:flex-row items-center gap-2">
<SecondaryButton
className="w-full text-center"
className="w-full text-center h-[46px]"
onClick={() => setIsResettingPassword(false)}
>
Go Back
</SecondaryButton>
<PrimaryButton type="submit" className="w-full text-center" loading={isSubmitting}>
<PrimaryButton type="submit" className="w-full text-center h-[46px]" loading={isSubmitting}>
{isSubmitting ? "Sending link..." : "Send reset link"}
</PrimaryButton>
</div>
@@ -1,9 +1,14 @@
import { useEffect, useState, FC } from "react";

import Link from "next/link";
import Image from "next/image";
import { useRouter } from "next/router";

// next-themes
import { useTheme } from "next-themes";
// images
import githubImage from "/public/logos/github-black.png";
import githubBlackImage from "/public/logos/github-black.png";
import githubWhiteImage from "/public/logos/github-white.png";

const { NEXT_PUBLIC_GITHUB_ID } = process.env;

@@ -11,15 +16,15 @@ export interface GithubLoginButtonProps {
handleSignIn: React.Dispatch<string>;
}

export const GithubLoginButton: FC<GithubLoginButtonProps> = (props) => {
const { handleSignIn } = props;
// router
export const GithubLoginButton: FC<GithubLoginButtonProps> = ({ handleSignIn }) => {
const [loginCallBackURL, setLoginCallBackURL] = useState(undefined);
const [gitCode, setGitCode] = useState<null | string>(null);

const {
query: { code },
} = useRouter();
// states
const [loginCallBackURL, setLoginCallBackURL] = useState(undefined);
const [gitCode, setGitCode] = useState<null | string>(null);

const { theme } = useTheme();

useEffect(() => {
if (code && !gitCode) {
@@ -35,13 +40,18 @@ export const GithubLoginButton: FC<GithubLoginButtonProps> = (props) => {
}, []);

return (
<div className="w-full flex justify-center items-center px-[3px]">
<div className="w-full flex justify-center items-center">
<Link
href={`https://github.com/login/oauth/authorize?client_id=${NEXT_PUBLIC_GITHUB_ID}&redirect_uri=${loginCallBackURL}&scope=read:user,user:email`}
>
<button className="flex w-full items-center justify-center gap-3 rounded border border-brand-base p-2 text-sm font-medium text-brand-secondary duration-300 hover:bg-brand-surface-2">
<Image src={githubImage} height={20} width={20} color="#000" alt="GitHub Logo" />
<span>Sign In with Github</span>
<button className="flex w-full items-center justify-center gap-2 rounded border border-custom-border-300 p-2 text-sm font-medium text-custom-text-100 duration-300 hover:bg-custom-background-80 h-[46px]">
<Image
src={theme === "dark" ? githubWhiteImage : githubBlackImage}
height={20}
width={20}
alt="GitHub Logo"
/>
<span>Sign in with GitHub</span>
</button>
</Link>
</div>
@@ -1,5 +1,5 @@
import { FC, CSSProperties, useEffect, useRef, useCallback, useState } from "react";
// next

import Script from "next/script";

export interface IGoogleLoginButton {
@@ -8,30 +8,36 @@ export interface IGoogleLoginButton {
styles?: CSSProperties;
}

export const GoogleLoginButton: FC<IGoogleLoginButton> = (props) => {
const { handleSignIn } = props;

export const GoogleLoginButton: FC<IGoogleLoginButton> = ({ handleSignIn }) => {
const googleSignInButton = useRef<HTMLDivElement>(null);
const [gsiScriptLoaded, setGsiScriptLoaded] = useState(false);

const loadScript = useCallback(() => {
if (!googleSignInButton.current || gsiScriptLoaded) return;

window?.google?.accounts.id.initialize({
client_id: process.env.NEXT_PUBLIC_GOOGLE_CLIENTID || "",
callback: handleSignIn,
});
window?.google?.accounts.id.renderButton(
googleSignInButton.current,
{
type: "standard",
theme: "outline",
size: "large",
logo_alignment: "center",
width: "410",
text: "continue_with",
} as GsiButtonConfiguration // customization attributes
);

try {
window?.google?.accounts.id.renderButton(
googleSignInButton.current,
{
type: "standard",
theme: "outline",
size: "large",
logo_alignment: "center",
width: 360,
text: "signin_with",
} as GsiButtonConfiguration // customization attributes
);
} catch (err) {
console.log(err);
}

window?.google?.accounts.id.prompt(); // also display the One Tap dialog

setGsiScriptLoaded(true);
}, [handleSignIn, gsiScriptLoaded]);

@@ -48,7 +54,7 @@ export const GoogleLoginButton: FC<IGoogleLoginButton> = (props) => {
<>
<Script src="https://accounts.google.com/gsi/client" async defer onLoad={loadScript} />
<div
className="overflow-hidden rounded w-full flex justify-center items-center"
className="overflow-hidden rounded w-full"
id="googleSignInButton"
ref={googleSignInButton}
/>
@@ -97,7 +97,7 @@ export const CreateUpdateAnalyticsModal: React.FC<Props> = ({ isOpen, handleClos
leaveFrom="opacity-100"
leaveTo="opacity-0"
>
<div className="fixed inset-0 bg-brand-backdrop bg-opacity-50 transition-opacity" />
<div className="fixed inset-0 bg-custom-backdrop bg-opacity-50 transition-opacity" />
</Transition.Child>

<div className="fixed inset-0 z-10 overflow-y-auto">
@@ -111,10 +111,13 @@ export const CreateUpdateAnalyticsModal: React.FC<Props> = ({ isOpen, handleClos
leaveFrom="opacity-100 translate-y-0 sm:scale-100"
leaveTo="opacity-0 translate-y-4 sm:translate-y-0 sm:scale-95"
>
<Dialog.Panel className="relative transform rounded-lg border border-brand-base bg-brand-base px-4 pt-5 pb-4 text-left shadow-xl transition-all sm:my-8 sm:w-full sm:max-w-2xl sm:p-6">
<Dialog.Panel className="relative transform rounded-lg border border-custom-border-200 bg-custom-background-100 px-4 pt-5 pb-4 text-left shadow-xl transition-all sm:my-8 sm:w-full sm:max-w-2xl sm:p-6">
<form onSubmit={handleSubmit(onSubmit)}>
<div>
<Dialog.Title as="h3" className="text-lg font-medium leading-6 text-brand-base">
<Dialog.Title
as="h3"
className="text-lg font-medium leading-6 text-custom-text-100"
>
Save Analytics
</Dialog.Title>
<div className="mt-5">
@@ -61,7 +61,7 @@ export const CustomAnalytics: React.FC<Props> = ({
<AnalyticsSelectBar
control={control}
setValue={setValue}
projects={projects}
projects={projects ?? []}
params={params}
fullScreen={fullScreen}
isProjectLevel={isProjectLevel}
@@ -86,7 +86,7 @@ export const CustomAnalytics: React.FC<Props> = ({
</div>
) : (
<div className="grid h-full place-items-center p-5">
<div className="space-y-4 text-brand-secondary">
<div className="space-y-4 text-custom-text-200">
<p className="text-sm">No matching issues found. Try changing the parameters.</p>
</div>
</div>
@@ -104,7 +104,7 @@ export const CustomAnalytics: React.FC<Props> = ({
)
) : (
<div className="grid h-full place-items-center p-5">
<div className="space-y-4 text-brand-secondary">
<div className="space-y-4 text-custom-text-200">
<p className="text-sm">There was some error in fetching the data.</p>
<div className="flex items-center justify-center gap-2">
<PrimaryButton
@@ -14,24 +14,24 @@ type Props = {
export const CustomTooltip: React.FC<Props> = ({ datum, analytics, params }) => {
let tooltipValue: string | number = "";

const renderAssigneeName = (assigneeId: string): string => {
const assignee = analytics.extras.assignee_details.find((a) => a.assignees__id === assigneeId);

if (!assignee) return "No assignee";

return assignee.assignees__display_name || "No assignee";
};

if (params.segment) {
if (DATE_KEYS.includes(params.segment)) tooltipValue = renderMonthAndYear(datum.id);
else if (params.segment === "assignees__email") {
const assignee = analytics.extras.assignee_details.find(
(a) => a.assignees__email === datum.id
);

if (assignee)
tooltipValue = assignee.assignees__first_name + " " + assignee.assignees__last_name;
else tooltipValue = "No assignees";
} else tooltipValue = datum.id;
else tooltipValue = datum.id;
} else {
if (DATE_KEYS.includes(params.x_axis)) tooltipValue = datum.indexValue;
else tooltipValue = datum.id === "count" ? "Issue count" : "Estimate";
}

return (
<div className="flex items-center gap-2 rounded-md border border-brand-base bg-brand-surface-2 p-2 text-xs">
<div className="flex items-center gap-2 rounded-md border border-custom-border-200 bg-custom-background-80 p-2 text-xs">
<span
className="h-3 w-3 rounded"
style={{
@@ -39,7 +39,7 @@ export const CustomTooltip: React.FC<Props> = ({ datum, analytics, params }) =>
}}
/>
<span
className={`font-medium text-brand-secondary ${
className={`font-medium text-custom-text-200 ${
params.segment
? params.segment === "priority" || params.segment === "state__group"
? "capitalize"
@@ -49,7 +49,10 @@ export const CustomTooltip: React.FC<Props> = ({ datum, analytics, params }) =>
: ""
}`}
>
{tooltipValue}:
{params.segment === "assignees__id"
? renderAssigneeName(tooltipValue.toString())
: tooltipValue}
:
</span>
<span>{datum.value}</span>
</div>
@@ -29,6 +29,14 @@ export const AnalyticsGraph: React.FC<Props> = ({
yAxisKey,
fullScreen,
}) => {
const renderAssigneeName = (assigneeId: string): string => {
const assignee = analytics.extras.assignee_details.find((a) => a.assignees__id === assigneeId);

if (!assignee) return "?";

return assignee.assignees__display_name || "?";
};

const generateYAxisTickValues = () => {
if (!analytics) return [];

@@ -70,17 +78,17 @@ export const AnalyticsGraph: React.FC<Props> = ({
height={fullScreen ? "400px" : "300px"}
margin={{
right: 20,
bottom: params.x_axis === "assignees__email" ? 50 : longestXAxisLabel.length * 5 + 20,
bottom: params.x_axis === "assignees__id" ? 50 : longestXAxisLabel.length * 5 + 20,
}}
axisBottom={{
tickSize: 0,
tickPadding: 10,
tickRotation: barGraphData.data.length > 7 ? -45 : 0,
renderTick:
params.x_axis === "assignees__email"
params.x_axis === "assignees__id"
? (datum) => {
const avatar = analytics.extras.assignee_details?.find(
(a) => a?.assignees__email === datum?.value
(a) => a?.assignees__display_name === datum?.value
)?.assignees__avatar;

if (avatar && avatar !== "")
@@ -101,7 +109,11 @@ export const AnalyticsGraph: React.FC<Props> = ({
<g transform={`translate(${datum.x},${datum.y})`}>
<circle cy={18} r={8} fill="#374151" />
<text x={0} y={21} textAnchor="middle" fontSize={9} fill="#ffffff">
{datum.value && datum.value !== "None"
{params.x_axis === "assignees__id"
? datum.value && datum.value !== "None"
? renderAssigneeName(datum.value)[0].toUpperCase()
: "?"
: datum.value && datum.value !== "None"
? `${datum.value}`.toUpperCase()[0]
: "?"}
</text>
@@ -111,7 +123,6 @@ export const AnalyticsGraph: React.FC<Props> = ({
: undefined,
}}
theme={{
background: "rgb(var(--color-bg-base))",
axis: {},
}}
/>
@@ -29,7 +29,7 @@ export const AnalyticsSelectBar: React.FC<Props> = ({
>
{!isProjectLevel && (
<div>
<h6 className="text-xs text-brand-secondary">Project</h6>
<h6 className="text-xs text-custom-text-200">Project</h6>
<Controller
name="project"
control={control}
@@ -40,7 +40,7 @@ export const AnalyticsSelectBar: React.FC<Props> = ({
</div>
)}
<div>
<h6 className="text-xs text-brand-secondary">Measure (y-axis)</h6>
<h6 className="text-xs text-custom-text-200">Measure (y-axis)</h6>
<Controller
name="y_axis"
control={control}
@@ -50,7 +50,7 @@ export const AnalyticsSelectBar: React.FC<Props> = ({
/>
</div>
<div>
<h6 className="text-xs text-brand-secondary">Dimension (x-axis)</h6>
<h6 className="text-xs text-custom-text-200">Dimension (x-axis)</h6>
<Controller
name="x_axis"
control={control}
@@ -67,7 +67,7 @@ export const AnalyticsSelectBar: React.FC<Props> = ({
/>
</div>
<div>
<h6 className="text-xs text-brand-secondary">Group</h6>
<h6 className="text-xs text-custom-text-200">Group</h6>
<Controller
name="segment"
control={control}
@@ -23,13 +23,14 @@ import {
import { ContrastIcon, LayerDiagonalIcon } from "components/icons";
// helpers
import { renderShortDate } from "helpers/date-time.helper";
import { renderEmoji } from "helpers/emoji.helper";
import { truncateText } from "helpers/string.helper";
// types
import {
IAnalyticsParams,
IAnalyticsResponse,
ICurrentUserResponse,
IExportAnalyticsFormData,
IProject,
IWorkspace,
} from "types";
// fetch-keys
@@ -178,23 +179,23 @@ export const AnalyticsSidebar: React.FC<Props> = ({
};

const selectedProjects =
params.project && params.project.length > 0 ? params.project : projects.map((p) => p.id);
params.project && params.project.length > 0 ? params.project : projects?.map((p) => p.id);

return (
<div
className={`px-5 py-2.5 flex items-center justify-between space-y-2 ${
fullScreen
? "border-l border-brand-base md:h-full md:border-l md:border-brand-base md:space-y-4 overflow-hidden md:flex-col md:items-start md:py-5"
? "border-l border-custom-border-200 md:h-full md:border-l md:border-custom-border-200 md:space-y-4 overflow-hidden md:flex-col md:items-start md:py-5"
: ""
}`}
>
<div className="flex items-center gap-2 flex-wrap">
<div className="flex items-center gap-1 bg-brand-surface-2 rounded-md px-3 py-1 text-brand-secondary text-xs">
<div className="flex items-center gap-1 bg-custom-background-80 rounded-md px-3 py-1 text-custom-text-200 text-xs">
<LayerDiagonalIcon height={14} width={14} />
{analytics ? analytics.total : "..."} Issues
</div>
{isProjectLevel && (
<div className="flex items-center gap-1 bg-brand-surface-2 rounded-md px-3 py-1 text-brand-secondary text-xs">
<div className="flex items-center gap-1 bg-custom-background-80 rounded-md px-3 py-1 text-custom-text-200 text-xs">
<CalendarDaysIcon className="h-3.5 w-3.5" />
{renderShortDate(
(cycleId
@@ -206,7 +207,7 @@ export const AnalyticsSidebar: React.FC<Props> = ({
</div>
)}
</div>
<div className="h-full overflow-hidden">
<div className="h-full w-full overflow-hidden">
{fullScreen ? (
<>
{!isProjectLevel && selectedProjects && selectedProjects.length > 0 && (
@@ -214,61 +215,57 @@ export const AnalyticsSidebar: React.FC<Props> = ({
<h4 className="font-medium">Selected Projects</h4>
<div className="space-y-6 mt-4 h-full overflow-y-auto">
{selectedProjects.map((projectId) => {
const project: IProject = projects.find((p) => p.id === projectId);
const project = projects?.find((p) => p.id === projectId);

return (
<div key={project.id}>
<div className="text-sm flex items-center gap-1">
{project.emoji ? (
<span className="grid h-6 w-6 flex-shrink-0 place-items-center">
{String.fromCodePoint(parseInt(project.emoji))}
</span>
) : project.icon_prop ? (
<div className="h-6 w-6 grid place-items-center flex-shrink-0">
<span
style={{ color: project.icon_prop.color }}
className="material-symbols-rounded text-lg"
>
{project.icon_prop.name}
if (project)
return (
<div key={project.id} className="w-full">
<div className="text-sm flex items-center gap-1">
{project.emoji ? (
<span className="grid h-6 w-6 flex-shrink-0 place-items-center">
{renderEmoji(project.emoji)}
</span>
</div>
) : (
<span className="grid h-6 w-6 mr-1 flex-shrink-0 place-items-center rounded bg-gray-700 uppercase text-white">
{project?.name.charAt(0)}
</span>
)}
<h5 className="break-words">
{project.name}
<span className="text-brand-secondary text-xs ml-1">
({project.identifier})
</span>
</h5>
</div>
<div className="mt-4 space-y-3 pl-2">
<div className="flex items-center justify-between gap-2 text-xs">
<div className="flex items-center gap-2">
<UserGroupIcon className="h-4 w-4 text-brand-secondary" />
<h6>Total members</h6>
</div>
<span className="text-brand-secondary">{project.total_members}</span>
) : project.icon_prop ? (
<div className="h-6 w-6 grid place-items-center flex-shrink-0">
{renderEmoji(project.icon_prop)}
</div>
) : (
<span className="grid h-6 w-6 mr-1 flex-shrink-0 place-items-center rounded bg-gray-700 uppercase text-white">
{project?.name.charAt(0)}
</span>
)}
<h5 className="flex items-center gap-1">
<p className="break-words">{truncateText(project.name, 20)}</p>
<span className="text-custom-text-200 text-xs ml-1">
({project.identifier})
</span>
</h5>
</div>
<div className="flex items-center justify-between gap-2 text-xs">
<div className="flex items-center gap-2">
<ContrastIcon height={16} width={16} />
<h6>Total cycles</h6>
<div className="mt-4 space-y-3 pl-2 w-full">
<div className="flex items-center justify-between gap-2 text-xs">
<div className="flex items-center gap-2">
<UserGroupIcon className="h-4 w-4 text-custom-text-200" />
<h6>Total members</h6>
</div>
<span className="text-custom-text-200">{project.total_members}</span>
</div>
<span className="text-brand-secondary">{project.total_cycles}</span>
</div>
<div className="flex items-center justify-between gap-2 text-xs">
<div className="flex items-center gap-2">
<UserGroupIcon className="h-4 w-4 text-brand-secondary" />
<h6>Total modules</h6>
<div className="flex items-center justify-between gap-2 text-xs">
<div className="flex items-center gap-2">
<ContrastIcon height={16} width={16} />
<h6>Total cycles</h6>
</div>
<span className="text-custom-text-200">{project.total_cycles}</span>
</div>
<div className="flex items-center justify-between gap-2 text-xs">
<div className="flex items-center gap-2">
<UserGroupIcon className="h-4 w-4 text-custom-text-200" />
<h6>Total modules</h6>
</div>
<span className="text-custom-text-200">{project.total_modules}</span>
</div>
<span className="text-brand-secondary">{project.total_modules}</span>
</div>
</div>
</div>
);
);
})}
</div>
</div>
@@ -279,13 +276,11 @@ export const AnalyticsSidebar: React.FC<Props> = ({
<h4 className="font-medium break-words">Analytics for {cycleDetails.name}</h4>
<div className="space-y-4 mt-4">
<div className="flex items-center gap-2 text-xs">
<h6 className="text-brand-secondary">Lead</h6>
<span>
{cycleDetails.owned_by?.first_name} {cycleDetails.owned_by?.last_name}
</span>
<h6 className="text-custom-text-200">Lead</h6>
<span>{cycleDetails.owned_by?.display_name}</span>
</div>
<div className="flex items-center gap-2 text-xs">
<h6 className="text-brand-secondary">Start Date</h6>
<h6 className="text-custom-text-200">Start Date</h6>
<span>
{cycleDetails.start_date && cycleDetails.start_date !== ""
? renderShortDate(cycleDetails.start_date)
@@ -293,7 +288,7 @@ export const AnalyticsSidebar: React.FC<Props> = ({
</span>
</div>
<div className="flex items-center gap-2 text-xs">
<h6 className="text-brand-secondary">Target Date</h6>
<h6 className="text-custom-text-200">Target Date</h6>
<span>
{cycleDetails.end_date && cycleDetails.end_date !== ""
? renderShortDate(cycleDetails.end_date)
@@ -307,14 +302,11 @@ export const AnalyticsSidebar: React.FC<Props> = ({
<h4 className="font-medium break-words">Analytics for {moduleDetails.name}</h4>
<div className="space-y-4 mt-4">
<div className="flex items-center gap-2 text-xs">
<h6 className="text-brand-secondary">Lead</h6>
<span>
{moduleDetails.lead_detail?.first_name}{" "}
{moduleDetails.lead_detail?.last_name}
</span>
<h6 className="text-custom-text-200">Lead</h6>
<span>{moduleDetails.lead_detail?.display_name}</span>
</div>
<div className="flex items-center gap-2 text-xs">
<h6 className="text-brand-secondary">Start Date</h6>
<h6 className="text-custom-text-200">Start Date</h6>
<span>
{moduleDetails.start_date && moduleDetails.start_date !== ""
? renderShortDate(moduleDetails.start_date)
@@ -322,7 +314,7 @@ export const AnalyticsSidebar: React.FC<Props> = ({
</span>
</div>
<div className="flex items-center gap-2 text-xs">
<h6 className="text-brand-secondary">Target Date</h6>
<h6 className="text-custom-text-200">Target Date</h6>
<span>
{moduleDetails.target_date && moduleDetails.target_date !== ""
? renderShortDate(moduleDetails.target_date)
@@ -336,16 +328,11 @@ export const AnalyticsSidebar: React.FC<Props> = ({
<div className="flex items-center gap-1">
{projectDetails?.emoji ? (
<div className="grid h-6 w-6 flex-shrink-0 place-items-center">
{String.fromCodePoint(parseInt(projectDetails.emoji))}
{renderEmoji(projectDetails.emoji)}
</div>
) : projectDetails?.icon_prop ? (
<div className="h-6 w-6 grid place-items-center flex-shrink-0">
<span
style={{ color: projectDetails.icon_prop.color }}
className="material-symbols-rounded text-lg"
>
{projectDetails.icon_prop.name}
</span>
{renderEmoji(projectDetails.icon_prop)}
</div>
) : (
<span className="grid h-6 w-6 mr-1 flex-shrink-0 place-items-center rounded bg-gray-700 uppercase text-white">
@@ -356,13 +343,10 @@ export const AnalyticsSidebar: React.FC<Props> = ({
</div>
<div className="space-y-4 mt-4">
<div className="flex items-center gap-2 text-xs">
<h6 className="text-brand-secondary">Network</h6>
<h6 className="text-custom-text-200">Network</h6>
<span>
{
NETWORK_CHOICES[
`${projectDetails?.network}` as keyof typeof NETWORK_CHOICES
]
}
{NETWORK_CHOICES.find((n) => n.key === projectDetails?.network)?.label ??
""}
</span>
</div>
</div>
@@ -22,24 +22,21 @@ type Props = {
};

export const AnalyticsTable: React.FC<Props> = ({ analytics, barGraphData, params, yAxisKey }) => {
const renderAssigneeName = (email: string): string => {
const assignee = analytics.extras.assignee_details.find((a) => a.assignees__email === email);
const renderAssigneeName = (assigneeId: string): string => {
const assignee = analytics.extras.assignee_details.find((a) => a.assignees__id === assigneeId);

if (!assignee) return "No assignee";

if (assignee.assignees__first_name !== "")
return assignee.assignees__first_name + " " + assignee.assignees__last_name;

return email;
return assignee.assignees__display_name || "No assignee";
};

return (
<div className="flow-root">
<div className="overflow-x-auto">
<div className="inline-block min-w-full align-middle">
<table className="min-w-full divide-y divide-brand-base whitespace-nowrap border-y border-brand-base">
<thead className="bg-brand-surface-2">
<tr className="divide-x divide-brand-base text-sm text-brand-base">
<table className="min-w-full divide-y divide-custom-border-200 whitespace-nowrap border-y border-custom-border-200">
<thead className="bg-custom-background-80">
<tr className="divide-x divide-custom-border-200 text-sm text-custom-text-100">
<th scope="col" className="py-3 px-2.5 text-left font-medium">
{ANALYTICS_X_AXIS_VALUES.find((v) => v.value === params.x_axis)?.label}
</th>
@@ -65,10 +62,10 @@ export const AnalyticsTable: React.FC<Props> = ({ analytics, barGraphData, param
}}
/>
)}
{DATE_KEYS.includes(params.segment ?? "")
? renderMonthAndYear(key)
: params.segment === "assignees__email"
{params.segment === "assignees__id"
? renderAssigneeName(key)
: DATE_KEYS.includes(params.segment ?? "")
? renderMonthAndYear(key)
: key}
</div>
</th>
@@ -80,11 +77,11 @@ export const AnalyticsTable: React.FC<Props> = ({ analytics, barGraphData, param
)}
</tr>
</thead>
<tbody className="divide-y divide-brand-base">
<tbody className="divide-y divide-custom-border-200">
{barGraphData.data.map((item, index) => (
<tr
key={`table-row-${index}`}
className="divide-x divide-brand-base text-xs text-brand-secondary"
className="divide-x divide-custom-border-200 text-xs text-custom-text-200"
>
<td
className={`flex items-center gap-2 whitespace-nowrap py-2 px-2.5 font-medium ${
@@ -108,7 +105,7 @@ export const AnalyticsTable: React.FC<Props> = ({ analytics, barGraphData, param
}}
/>
)}
{params.x_axis === "assignees__email"
{params.x_axis === "assignees__id"
? renderAssigneeName(`${item.name}`)
: addSpaceIfCamelCase(`${item.name}`)}
</td>
@@ -150,16 +150,16 @@ export const AnalyticsProjectModal: React.FC<Props> = ({ isOpen, onClose }) => {

return (
<div
className={`absolute top-0 z-30 h-full bg-brand-surface-1 ${
className={`absolute top-0 z-30 h-full bg-custom-background-90 ${
fullScreen ? "p-2 w-full" : "w-1/2"
} ${isOpen ? "right-0" : "-right-full"} duration-300 transition-all`}
>
<div
className={`flex h-full flex-col overflow-hidden border-brand-base bg-brand-base text-left ${
className={`flex h-full flex-col overflow-hidden border-custom-border-200 bg-custom-background-100 text-left ${
fullScreen ? "rounded-lg border" : "border-l"
}`}
>
<div className="flex items-center justify-between gap-4 bg-brand-base px-5 py-4 text-sm">
<div className="flex items-center justify-between gap-4 bg-custom-background-100 px-5 py-4 text-sm">
<h3 className="break-words">
Analytics for{" "}
{cycleId ? cycleDetails?.name : moduleId ? moduleDetails?.name : projectDetails?.name}
@@ -167,7 +167,7 @@ export const AnalyticsProjectModal: React.FC<Props> = ({ isOpen, onClose }) => {
<div className="flex items-center gap-2">
<button
type="button"
className="grid place-items-center p-1 text-brand-secondary hover:text-brand-base"
className="grid place-items-center p-1 text-custom-text-200 hover:text-custom-text-100"
onClick={() => setFullScreen((prevData) => !prevData)}
>
{fullScreen ? (
@@ -178,7 +178,7 @@ export const AnalyticsProjectModal: React.FC<Props> = ({ isOpen, onClose }) => {
</button>
<button
type="button"
className="grid place-items-center p-1 text-brand-secondary hover:text-brand-base"
className="grid place-items-center p-1 text-custom-text-200 hover:text-custom-text-100"
onClick={handleClose}
>
<XMarkIcon className="h-4 w-4" />
@@ -186,13 +186,13 @@ export const AnalyticsProjectModal: React.FC<Props> = ({ isOpen, onClose }) => {
</div>
</div>
<Tab.Group as={Fragment}>
<Tab.List as="div" className="space-x-2 border-b border-brand-base p-5 pt-0">
<Tab.List as="div" className="space-x-2 border-b border-custom-border-200 p-5 pt-0">
{tabsList.map((tab) => (
<Tab
key={tab}
className={({ selected }) =>
`rounded-3xl border border-brand-base px-4 py-2 text-xs hover:bg-brand-surface-2 ${
selected ? "bg-brand-surface-2" : ""
`rounded-3xl border border-custom-border-200 px-4 py-2 text-xs hover:bg-custom-background-80 ${
selected ? "bg-custom-background-80" : ""
}`
}
onClick={() => trackAnalyticsEvent(tab)}
@@ -10,10 +10,10 @@ type Props = {
};

export const AnalyticsDemand: React.FC<Props> = ({ defaultAnalytics }) => (
<div className="space-y-3 rounded-[10px] border border-brand-base p-3">
<div className="space-y-3 rounded-[10px] border border-custom-border-200 p-3">
<h5 className="text-xs text-red-500">DEMAND</h5>
<div>
<h4 className="text-brand-bas text-base font-medium">Total open tasks</h4>
<h4 className="text-custom-text-100 text-base font-medium">Total open tasks</h4>
<h3 className="mt-1 text-xl font-semibold">{defaultAnalytics.open_issues}</h3>
</div>
<div className="space-y-6">
@@ -31,13 +31,13 @@ export const AnalyticsDemand: React.FC<Props> = ({ defaultAnalytics }) => (
}}
/>
<h6 className="capitalize">{group.state_group}</h6>
<span className="ml-1 rounded-3xl bg-brand-surface-2 px-2 py-0.5 text-[0.65rem] text-brand-secondary">
<span className="ml-1 rounded-3xl bg-custom-background-80 px-2 py-0.5 text-[0.65rem] text-custom-text-200">
{group.state_count}
</span>
</div>
<p className="text-brand-secondary">{percentage}%</p>
<p className="text-custom-text-200">{percentage}%</p>
</div>
<div className="bar relative h-1 w-full rounded bg-brand-surface-2">
<div className="bar relative h-1 w-full rounded bg-custom-background-80">
<div
className="absolute top-0 left-0 h-1 rounded duration-300"
style={{
@@ -50,8 +50,8 @@ export const AnalyticsDemand: React.FC<Props> = ({ defaultAnalytics }) => (
);
})}
</div>
<div className="!mt-6 flex w-min items-center gap-2 whitespace-nowrap rounded-md border border-brand-base bg-brand-surface-2 p-2 text-xs">
<p className="flex items-center gap-1 text-brand-secondary">
<div className="!mt-6 flex w-min items-center gap-2 whitespace-nowrap rounded-md border border-custom-border-200 bg-custom-background-80 p-2 text-xs">
<p className="flex items-center gap-1 text-custom-text-200">
<PlayIcon className="h-4 w-4 -rotate-90" aria-hidden="true" />
<span>Estimate Demand:</span>
</p>
@@ -1,22 +1,38 @@
// ui
import { ProfileEmptyState } from "components/ui";
// image
import emptyUsers from "public/empty-state/empty_users.svg";

type Props = {
users: {
avatar: string | null;
email: string | null;
display_name: string | null;
firstName: string;
lastName: string;
count: number;
id: string;
}[];
title: string;
emptyStateMessage: string;
workspaceSlug: string;
};

export const AnalyticsLeaderboard: React.FC<Props> = ({ users, title }) => (
<div className="p-3 border border-brand-base rounded-[10px]">
export const AnalyticsLeaderboard: React.FC<Props> = ({
users,
title,
emptyStateMessage,
workspaceSlug,
}) => (
<div className="p-3 border border-custom-border-200 rounded-[10px]">
<h6 className="text-base font-medium">{title}</h6>
{users.length > 0 ? (
<div className="mt-3 space-y-3">
{users.map((user) => (
<div
key={user.email ?? "None"}
<a
key={user.display_name ?? "None"}
href={`/${workspaceSlug}/profile/${user.id}`}
target="_blank"
rel="noopener noreferrer"
className="flex items-start justify-between gap-4 text-xs"
>
<div className="flex items-center gap-2">
@@ -25,24 +41,26 @@ export const AnalyticsLeaderboard: React.FC<Props> = ({ users, title }) => (
<img
src={user.avatar}
className="absolute top-0 left-0 h-full w-full object-cover rounded-full"
alt={user.email ?? "None"}
alt={user.display_name ?? "None"}
/>
</div>
) : (
<div className="grid place-items-center flex-shrink-0 rounded-full bg-gray-700 text-[11px] capitalize text-white h-4 w-4">
{user.firstName !== "" ? user.firstName[0] : "?"}
{user.display_name !== "" ? user?.display_name?.[0] : "?"}
</div>
)}
<span className="break-words text-brand-secondary">
{user.firstName !== "" ? `${user.firstName} ${user.lastName}` : "No assignee"}
<span className="break-words text-custom-text-200">
{user.display_name !== "" ? `${user.display_name}` : "No assignee"}
</span>
</div>
<span className="flex-shrink-0">{user.count}</span>
</div>
</a>
))}
</div>
) : (
<div className="text-brand-secondary text-center text-sm py-8">No matching data found.</div>
<div className="px-7 py-4">
<ProfileEmptyState title="No Data yet" description={emptyStateMessage} image={emptyUsers} />
</div>
)}
</div>
);
@@ -56,22 +56,28 @@ export const ScopeAndDemand: React.FC<Props> = ({ fullScreen = true }) => {
<AnalyticsLeaderboard
users={defaultAnalytics.most_issue_created_user?.map((user) => ({
avatar: user?.created_by__avatar,
email: user?.created_by__email,
firstName: user?.created_by__first_name,
lastName: user?.created_by__last_name,
display_name: user?.created_by__display_name,
count: user?.count,
id: user?.created_by__id,
}))}
title="Most issues created"
emptyStateMessage="Co-workers and the number issues created by them appears here."
workspaceSlug={workspaceSlug?.toString() ?? ""}
/>
<AnalyticsLeaderboard
users={defaultAnalytics.most_issue_closed_user?.map((user) => ({
avatar: user?.assignees__avatar,
email: user?.assignees__email,
firstName: user?.assignees__first_name,
lastName: user?.assignees__last_name,
display_name: user?.assignees__display_name,
count: user?.count,
id: user?.assignees__id,
}))}
title="Most issues closed"
emptyStateMessage="Co-workers and the number issues closed by them appears here."
workspaceSlug={workspaceSlug?.toString() ?? ""}
/>
<div className={fullScreen ? "md:col-span-2" : ""}>
<AnalyticsYearWiseIssues defaultAnalytics={defaultAnalytics} />
@@ -88,7 +94,7 @@ export const ScopeAndDemand: React.FC<Props> = ({ fullScreen = true }) => {
)
) : (
<div className="grid h-full place-items-center p-5">
<div className="space-y-4 text-brand-secondary">
<div className="space-y-4 text-custom-text-200">
<p className="text-sm">There was some error in fetching the data.</p>
<div className="flex items-center justify-center gap-2">
<PrimaryButton onClick={() => mutateDefaultAnalytics()}>Refresh</PrimaryButton>
@@ -1,5 +1,7 @@
// ui
import { BarGraph } from "components/ui";
import { BarGraph, ProfileEmptyState } from "components/ui";
// image
import emptyBarGraph from "public/empty-state/empty_bar_graph.svg";
// types
import { IDefaultAnalyticsResponse } from "types";

@@ -8,31 +10,28 @@ type Props = {
};

export const AnalyticsScope: React.FC<Props> = ({ defaultAnalytics }) => (
<div className="rounded-[10px] border border-brand-base">
<div className="rounded-[10px] border border-custom-border-200">
<h5 className="p-3 text-xs text-green-500">SCOPE</h5>
<div className="divide-y divide-brand-base">
<div className="divide-y divide-custom-border-200">
<div>
<h6 className="px-3 text-base font-medium">Pending issues</h6>
{defaultAnalytics.pending_issue_user.length > 0 ? (
<BarGraph
data={defaultAnalytics.pending_issue_user}
indexBy="assignees__email"
indexBy="assignees__display_name"
keys={["count"]}
height="250px"
colors={() => `#f97316`}
customYAxisTickValues={defaultAnalytics.pending_issue_user.map((d) => d.count)}
tooltip={(datum) => {
const assignee = defaultAnalytics.pending_issue_user.find(
(a) => a.assignees__email === `${datum.indexValue}`
(a) => a.assignees__display_name === `${datum.indexValue}`
);

return (
<div className="rounded-md border border-brand-base bg-brand-surface-2 p-2 text-xs">
<span className="font-medium text-brand-secondary">
{assignee
? assignee.assignees__first_name + " " + assignee.assignees__last_name
: "No assignee"}
:{" "}
<div className="rounded-md border border-custom-border-200 bg-custom-background-80 p-2 text-xs">
<span className="font-medium text-custom-text-200">
{assignee ? assignee.assignees__display_name : "No assignee"}:{" "}
</span>
{datum.value}
</div>
@@ -69,13 +68,16 @@ export const AnalyticsScope: React.FC<Props> = ({ defaultAnalytics }) => (
}}
margin={{ top: 20 }}
theme={{
background: "rgb(var(--color-bg-base))",
axis: {},
}}
/>
) : (
<div className="text-brand-secondary text-center text-sm py-8">
No matching data found.
<div className="px-7 py-4">
<ProfileEmptyState
title="No Data yet"
description="Analysis of pending issues by co-workers appears here."
image={emptyBarGraph}
/>
</div>
)}
</div>
@@ -1,5 +1,7 @@
// ui
import { LineGraph } from "components/ui";
import { LineGraph, ProfileEmptyState } from "components/ui";
// image
import emptyGraph from "public/empty-state/empty_graph.svg";
// types
import { IDefaultAnalyticsResponse } from "types";
// constants
@@ -9,54 +11,52 @@ type Props = {
defaultAnalytics: IDefaultAnalyticsResponse;
};

export const AnalyticsYearWiseIssues: React.FC<Props> = ({ defaultAnalytics }) => {
const currentMonth = new Date().getMonth();
const startMonth = Math.floor(currentMonth / 3) * 3 + 1;
const quarterMonthsList = [startMonth, startMonth + 1, startMonth + 2];

return (
<div className="py-3 border border-brand-base rounded-[10px]">
<h1 className="px-3 text-base font-medium">Issues closed in a year</h1>
{defaultAnalytics.issue_completed_month_wise.length > 0 ? (
<LineGraph
data={[
{
id: "issues_closed",
color: "rgb(var(--color-accent))",
data: MONTHS_LIST.map((month) => ({
x: month.label.substring(0, 3),
y:
defaultAnalytics.issue_completed_month_wise.find(
(data) => data.month === month.value
)?.count || 0,
})),
},
]}
customYAxisTickValues={defaultAnalytics.issue_completed_month_wise.map((data) => {
if (quarterMonthsList.includes(data.month)) return data.count;

return 0;
})}
height="300px"
colors={(datum) => datum.color}
curve="monotoneX"
margin={{ top: 20 }}
enableSlices="x"
sliceTooltip={(datum) => (
<div className="rounded-md border border-brand-base bg-brand-surface-2 p-2 text-xs">
{datum.slice.points[0].data.yFormatted}
<span className="text-brand-secondary"> issues closed in </span>
{datum.slice.points[0].data.xFormatted}
</div>
)}
theme={{
background: "rgb(var(--color-bg-base))",
}}
enableArea
export const AnalyticsYearWiseIssues: React.FC<Props> = ({ defaultAnalytics }) => (
<div className="py-3 border border-custom-border-200 rounded-[10px]">
<h1 className="px-3 text-base font-medium">Issues closed in a year</h1>
{defaultAnalytics.issue_completed_month_wise.length > 0 ? (
<LineGraph
data={[
{
id: "issues_closed",
color: "rgb(var(--color-primary-100))",
data: MONTHS_LIST.map((month) => ({
x: month.label.substring(0, 3),
y:
defaultAnalytics.issue_completed_month_wise.find(
(data) => data.month === month.value
)?.count || 0,
})),
},
]}
customYAxisTickValues={defaultAnalytics.issue_completed_month_wise.map(
(data) => data.count
)}
height="300px"
colors={(datum) => datum.color}
curve="monotoneX"
margin={{ top: 20 }}
enableSlices="x"
sliceTooltip={(datum) => (
<div className="rounded-md border border-custom-border-200 bg-custom-background-80 p-2 text-xs">
{datum.slice.points[0].data.yFormatted}
<span className="text-custom-text-200"> issues closed in </span>
{datum.slice.points[0].data.xFormatted}
</div>
)}
theme={{
background: "rgb(var(--color-background-100))",
}}
enableArea
/>
) : (
<div className="px-7 py-4">
<ProfileEmptyState
title="No Data yet"
description="Close issues to view analysis of the same in the form of a graph."
image={emptyGraph}
/>
) : (
<div className="text-brand-secondary text-center text-sm py-8">No matching data found.</div>
)}
</div>
);
};
</div>
)}
</div>
);
@@ -15,7 +15,7 @@ export const SelectProject: React.FC<Props> = ({ value, onChange, projects }) =>
query: project.name + project.identifier,
content: (
<div className="flex items-center gap-2">
<span className="text-brand-secondary text-[0.65rem]">{project.identifier}</span>
<span className="text-custom-text-200 text-[0.65rem]">{project.identifier}</span>
{project.name}
</div>
),
@@ -23,7 +23,7 @@ export const SelectSegment: React.FC<Props> = ({ value, onChange, params }) => {
label={
<span>
{ANALYTICS_X_AXIS_VALUES.find((v) => v.value === value)?.label ?? (
<span className="text-brand-secondary">No value</span>
<span className="text-custom-text-200">No value</span>
)}
</span>
}
@@ -22,7 +22,7 @@ export const NotAuthorizedView: React.FC<Props> = ({ actionButton, type }) => {

return (
<DefaultLayout>
<div className="flex h-full w-full flex-col items-center justify-center gap-y-5 bg-brand-surface-1 text-center">
<div className="flex h-full w-full flex-col items-center justify-center gap-y-5 bg-custom-background-100 text-center">
<div className="h-44 w-72">
<Image
src={type === "project" ? ProjectNotAuthorizedImg : WorkspaceNotAuthorizedImg}
@@ -31,16 +31,16 @@ export const NotAuthorizedView: React.FC<Props> = ({ actionButton, type }) => {
alt="ProjectSettingImg"
/>
</div>
<h1 className="text-xl font-medium text-brand-base">
<h1 className="text-xl font-medium text-custom-text-100">
Oops! You are not authorized to view this page
</h1>

<div className="w-full max-w-md text-base text-brand-secondary">
<div className="w-full max-w-md text-base text-custom-text-200">
{user ? (
<p>
You have signed in as {user.email}. <br />
<Link href={`/?next=${currentPath}`}>
<a className="font-medium text-brand-base">Sign in</a>
<a className="font-medium text-custom-text-100">Sign in</a>
</Link>{" "}
with different account that has access to this page.
</p>
@@ -48,7 +48,7 @@ export const NotAuthorizedView: React.FC<Props> = ({ actionButton, type }) => {
<p>
You need to{" "}
<Link href={`/?next=${currentPath}`}>
<a className="font-medium text-brand-base">Sign in</a>
<a className="font-medium text-custom-text-100">Sign in</a>
</Link>{" "}
with an account that has access to this page.
</p>
Some files were not shown because too many files have changed in this diff.