Mirror of https://github.com/makeplane/plane, synced 2025-08-07 19:59:33 +00:00

Compare commits: fix-copy_c...chore-adva

230 commits
Commit SHAs:

2a2feaf88e, e48b2da623, 9c9952a823, 906ce8b500, 6c483fad2f, 5b776392bd, ba158d5d6e, 084cc75726, 534f5c7dd0, 080cf70e3f,
4c3f7f27a5, 803f6cc62a, 3a6d0c11fb, 75d81f9e95, 0d5c7c6653, 079c3a3a99, 5f8d5ea388, 8613a80b16, dc16f2862e, e68d344410,
26c8cba322, b435ceedfc, 13c46e0fdf, 02bccb44d6, b5634f5fa1, 64aae0a2ac, a263bfc01f, 50082f0843, 30db59534d, e401c9d6e4,
39b5736c83, 2785419d12, ac5b974d67, 14ebaf0799, 7cdb622663, 855e4a3218, d456767492, 6faff1d556, bc2936dcd3, d366ac1581,
0a01e0eb41, b4cc2d83fe, 42e2b787f0, fbca9d9a7a, dbc00e4add, 28f9733d1b, 1e46290727, 5a1df8b496, f23a2f0780, d10bb0b638,
c4ddff5419, 10f5b4e9b8, cdca5a4126, 14dc6a56bc, 55340f9f48, efa64fc4b8, f5449c8f93, baabb82669, 298e3dc9ca, 190300bc6c,
550fe547e2, f278a284c4, 2bcf6c76cd, fb3e022042, e3fbb7b073, cce6dd581c, d86ac368a4, 101994840a, f60f57ef11, 546217f09b,
6df8323665, 77d022df71, 797f150ec4, b54f54999e, dff176be8f, 2bbaaed3ea, b5ceb94fb2, feb6243065, 5dacba74c9, 0efb0c239c,
c8be836d6c, 833b82e247, 280aa7f671, eac1115566, 8166a757a7, be5d77d978, 18fb3b8450, ef5616905e, aeb41e603c, 55eea1a8b7,
fa87ff14b7, 7d91b5f8df, 3ce40dfa2f, f65253c994, 97fcfaa653, 0e1ebff978, 642dabfe35, 48557cb670, 608da1465c, dbcc7bedb4,
c401b26dd4, a4bca0c39c, 24899887b2, c6953ff878, 06be9ab81b, ed8d00acb1, 915e374485, 1d5b93cebd, df65b8c34a, 4c688b1d25,
bfc6ed839f, b68396a4b2, b4fc715aba, 33a1b916cb, 2818310619, 882520b3c7, 20132e7544, 0ae57b49d2, d347269afb, a3fd616ec4,
9eeff158d5, ef20b5814e, 14914e8716, b738e39a4a, 993c7899b6, 2b411de1e3, 1f9222065e, 670134562f, 144c793e9e, 0a924e4824,
08702a5381, 270f282c3c, 37699362ad, 27cec64c56, 782b09eeaf, 5ac5892fe5, 96c403ff0b, 543552f492, c3cfcc1b92, ac84d6ecf0,
475b7a8396, 00f78bd6a1, 34337f90c1, 4f68aaafa6, 9c10235fca, 9c1b158291, 2d0a15efd6, d62ac6269b, d9e3405f5a, adee686ea3,
81fae36c23, 3f652ba44e, 16aa1d7034, 0db581509c, 523ab3f4a1, a57c37c26c, 65a0530cfe, 7bb291408d, 4be94adaca, 2d1b3fb39e,
585432824f, fe9640533c, 5ec817ba37, 9279b5f1fb, 921dfe3222, 8216785b27, 2bfe4d6a6e, 691cbef1f2, fed0ef6185, e8779511ad,
99dba80d19, 471fefce8b, 869c755065, a5ffbffed9, 784d651c5b, b19bca3b50, 1121c58ada, fb2987e9ef, a25cd426a9, 993713925a,
ae6e5a48fa, c125bc54ba, 41447e566a, cebd0b3599, caa522118d, aea5f39059, f29867968a, c91972cc0a, 84c7375d2a, 5cb37a0b9c,
c347dd7dcd, 0ec206b75d, e8718a84fe, 983e0fa081, ef108839c4, fe04e5a292, 50e0cb7ffd, d37d210921, ab3eadf767, dbdf2f001a,
0d069bf46e, 962923ff4f, f720a9afb2, 4032aa62c5, cbe248591e, 75a9b71edb, ef42ce04a4, 6bafdb6dd8, 72307ec100, 2eb1d03c20,
94bf90dac5, b0e941e4e2, e22265dc93, 075c234385, bc539e0d01, 04fb13cbca, ca5cf27957, 433682e913, f181b671f3, f82d4a9ead,
3f22642732, e339b7ad8f, d4991b9a48, 1bf683e044, 807148671f, d2b81ad2da, 3d14c9d9fe, d7f40cf578, b370ef72ee, 0341205666
.github/workflows/build-branch.yml (vendored): 52 changes

@@ -47,12 +47,6 @@ jobs:
       gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
       gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
       gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
-      build_proxy: ${{ steps.changed_files.outputs.proxy_any_changed }}
-      build_apiserver: ${{ steps.changed_files.outputs.apiserver_any_changed }}
-      build_admin: ${{ steps.changed_files.outputs.admin_any_changed }}
-      build_space: ${{ steps.changed_files.outputs.space_any_changed }}
-      build_web: ${{ steps.changed_files.outputs.web_any_changed }}
-      build_live: ${{ steps.changed_files.outputs.live_any_changed }}

       dh_img_web: ${{ steps.set_env_variables.outputs.DH_IMG_WEB }}
       dh_img_space: ${{ steps.set_env_variables.outputs.DH_IMG_SPACE }}
@@ -123,46 +117,7 @@
         name: Checkout Files
         uses: actions/checkout@v4

-      - name: Get changed files
-        id: changed_files
-        uses: tj-actions/changed-files@v42
-        with:
-          files_yaml: |
-            apiserver:
-              - apiserver/**
-            proxy:
-              - nginx/**
-            admin:
-              - admin/**
-              - packages/**
-              - "package.json"
-              - "yarn.lock"
-              - "tsconfig.json"
-              - "turbo.json"
-            space:
-              - space/**
-              - packages/**
-              - "package.json"
-              - "yarn.lock"
-              - "tsconfig.json"
-              - "turbo.json"
-            web:
-              - web/**
-              - packages/**
-              - "package.json"
-              - "yarn.lock"
-              - "tsconfig.json"
-              - "turbo.json"
-            live:
-              - live/**
-              - packages/**
-              - 'package.json'
-              - 'yarn.lock'
-              - 'tsconfig.json'
-              - 'turbo.json'

   branch_build_push_admin:
-    if: ${{ needs.branch_build_setup.outputs.build_admin == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
     name: Build-Push Admin Docker Image
     runs-on: ubuntu-22.04
     needs: [branch_build_setup]
@@ -185,7 +140,6 @@ jobs:
     buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}

   branch_build_push_web:
-    if: ${{ needs.branch_build_setup.outputs.build_web == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
     name: Build-Push Web Docker Image
     runs-on: ubuntu-22.04
     needs: [branch_build_setup]
@@ -208,7 +162,6 @@ jobs:
     buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}

   branch_build_push_space:
-    if: ${{ needs.branch_build_setup.outputs.build_space == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
     name: Build-Push Space Docker Image
     runs-on: ubuntu-22.04
     needs: [branch_build_setup]
@@ -231,7 +184,6 @@ jobs:
     buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}

   branch_build_push_live:
-    if: ${{ needs.branch_build_setup.outputs.build_live == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
     name: Build-Push Live Collaboration Docker Image
     runs-on: ubuntu-22.04
     needs: [branch_build_setup]
@@ -254,7 +206,6 @@ jobs:
     buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}

   branch_build_push_apiserver:
-    if: ${{ needs.branch_build_setup.outputs.build_apiserver == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
     name: Build-Push API Server Docker Image
     runs-on: ubuntu-22.04
     needs: [branch_build_setup]
@@ -277,7 +228,6 @@ jobs:
     buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}

   branch_build_push_proxy:
-    if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event_name == 'workflow_dispatch' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
     name: Build-Push Proxy Docker Image
     runs-on: ubuntu-22.04
     needs: [branch_build_setup]
@@ -323,7 +273,7 @@ jobs:
       run: |
         cp ./deploy/selfhost/install.sh deploy/selfhost/setup.sh
         sed -i 's/${APP_RELEASE:-stable}/${APP_RELEASE:-'${REL_VERSION}'}/g' deploy/selfhost/docker-compose.yml
-        sed -i 's/APP_RELEASE=stable/APP_RELEASE='${REL_VERSION}'/g' deploy/selfhost/variables.env
+        # sed -i 's/APP_RELEASE=stable/APP_RELEASE='${REL_VERSION}'/g' deploy/selfhost/variables.env

     - name: Create Release
       id: create_release
.github/workflows/build-test-pull-request.yml (vendored): 51 changes

@@ -6,49 +6,9 @@ on:
   types: ["opened", "synchronize", "ready_for_review"]

 jobs:
-  get-changed-files:
+  lint-apiserver:
     if: github.event.pull_request.draft == false
     runs-on: ubuntu-latest
-    outputs:
-      apiserver_changed: ${{ steps.changed-files.outputs.apiserver_any_changed }}
-      admin_changed: ${{ steps.changed-files.outputs.admin_any_changed }}
-      space_changed: ${{ steps.changed-files.outputs.space_any_changed }}
-      web_changed: ${{ steps.changed-files.outputs.web_any_changed }}
     steps:
       - uses: actions/checkout@v4
-      - name: Get changed files
-        id: changed-files
-        uses: tj-actions/changed-files@v44
-        with:
-          files_yaml: |
-            apiserver:
-              - apiserver/**
-            admin:
-              - admin/**
-              - packages/**
-              - 'package.json'
-              - 'yarn.lock'
-              - 'tsconfig.json'
-              - 'turbo.json'
-            space:
-              - space/**
-              - packages/**
-              - 'package.json'
-              - 'yarn.lock'
-              - 'tsconfig.json'
-              - 'turbo.json'
-            web:
-              - web/**
-              - packages/**
-              - 'package.json'
-              - 'yarn.lock'
-              - 'tsconfig.json'
-              - 'turbo.json'
-
-  lint-apiserver:
-    needs: get-changed-files
-    runs-on: ubuntu-latest
-    if: needs.get-changed-files.outputs.apiserver_changed == 'true'
-    steps:
-      - uses: actions/checkout@v4
       - name: Set up Python
@@ -63,8 +23,7 @@ jobs:
       run: ruff check --fix apiserver

   lint-admin:
-    needs: get-changed-files
-    if: needs.get-changed-files.outputs.admin_changed == 'true'
+    if: github.event.pull_request.draft == false
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
@@ -76,8 +35,7 @@ jobs:
     - run: yarn lint --filter=admin

   lint-space:
-    needs: get-changed-files
-    if: needs.get-changed-files.outputs.space_changed == 'true'
+    if: github.event.pull_request.draft == false
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
@@ -89,8 +47,7 @@ jobs:
     - run: yarn lint --filter=space

   lint-web:
-    needs: get-changed-files
-    if: needs.get-changed-files.outputs.web_changed == 'true'
+    if: github.event.pull_request.draft == false
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
.gitignore (vendored): 8 changes

@@ -1,5 +1,6 @@
 node_modules
 .next
+.yarn

 ### NextJS ###
 # Dependencies
@@ -78,10 +79,17 @@ pnpm-workspace.yaml
 .npmrc
 .secrets
 tmp/

 ## packages
 dist
 .temp/
 deploy/selfhost/plane-app/

+## Storybook
+*storybook.log
+output.css
+
 dev-editor
+# Redis
+*.rdb
+*.rdb.gz
.yarnrc.yml (new file): 1 change

@@ -0,0 +1 @@
+nodeLinker: node-modules
CONTRIBUTING.md

@@ -15,14 +15,33 @@ Without said minimal reproduction, we won't be able to investigate all [issues](

 You can open a new issue with this [issue form](https://github.com/makeplane/plane/issues/new).

+### Naming conventions for issues
+
+When opening a new issue, please use a clear and concise title that follows this format:
+
+- For bugs: `🐛 Bug: [short description]`
+- For features: `🚀 Feature: [short description]`
+- For improvements: `🛠️ Improvement: [short description]`
+- For documentation: `📘 Docs: [short description]`
+
+**Examples:**
+- `🐛 Bug: API token expiry time not saving correctly`
+- `📘 Docs: Clarify RAM requirement for local setup`
+- `🚀 Feature: Allow custom time selection for token expiration`
+
+This helps us triage and manage issues more efficiently.
+
 ## Projects setup and Architecture

 ### Requirements

-- Node.js version v16.18.0
+- Docker Engine installed and running
+- Node.js version 20+ [LTS version](https://nodejs.org/en/about/previous-releases)
 - Python version 3.8+
 - Postgres version v14
 - Redis version v6.2.7
+- **Memory**: Minimum **12 GB RAM** recommended
+
+> ⚠️ Running the project on a system with only 8 GB RAM may lead to setup failures or memory crashes (especially during Docker container build/start or dependency install). Use cloud environments like GitHub Codespaces or upgrade local RAM if possible.

 ### Setup the project

@@ -50,6 +69,17 @@ chmod +x setup.sh
 docker compose -f docker-compose-local.yml up
 ```

+5. Start web apps:
+
+```bash
+yarn dev
+```
+
+6. Open your browser to http://localhost:3001/god-mode/ and register yourself as instance admin
+7. Open up your browser to http://localhost:3000 then log in using the same credentials from the previous step
+
+That’s it! You’re all set to begin coding. Remember to refresh your browser if changes don’t auto-reload. Happy contributing! 🎉
+
 ## Missing a Feature?

 If a feature is missing, you can directly _request_ a new one [here](https://github.com/makeplane/plane/issues/new?assignees=&labels=feature&template=feature_request.yml&title=%F0%9F%9A%80+Feature%3A+). You also can do the same by choosing "🚀 Feature" when raising a [New Issue](https://github.com/makeplane/plane/issues/new/choose) on our GitHub Repository.
@@ -75,7 +105,7 @@ To ensure consistency throughout the source code, please keep these rules in min
 - **Improve documentation** - fix incomplete or missing [docs](https://docs.plane.so/), bad wording, examples or explanations.

 ## Contributing to language support
-This guide is designed to help contributors understand how to add or update translations in the application.
+This guide is designed to help contributors understand how to add or update translations in the application.

 ### Understanding translation structure

@@ -90,7 +120,7 @@ packages/i18n/src/locales/
 ├── fr/
 │   └── translations.json
 └── [language]/
-    └── translations.json
+    └── translations.json
 ```
 #### Nested structure
 To keep translations organized, we use a nested structure for keys. This makes it easier to manage and locate specific translations. For example:
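The example that illustrates this nesting is cut off by the hunk boundary above. As a stand-in, here is a minimal, hypothetical TypeScript sketch of how a nested translation dictionary and a dotted key lookup fit together; the actual keys in `packages/i18n/src/locales/*/translations.json` may differ:

```typescript
// Hypothetical nested translation dictionary; real keys in the repo may differ.
const translations = {
  sidebar: {
    projects: "Projects",
    settings: "Settings",
  },
  issue: {
    create: "Create work item",
  },
};

// A dotted key such as "issue.create" resolves by walking the nested object.
const resolve = (key: string, dict: Record<string, unknown>): unknown =>
  key.split(".").reduce<unknown>(
    (node, part) => (node as Record<string, unknown> | undefined)?.[part],
    dict
  );

console.log(resolve("issue.create", translations)); // "Create work item"
```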
@@ -110,14 +140,14 @@ To keep translations organized, we use a nested structure for keys. This makes i
 We use [IntlMessageFormat](https://formatjs.github.io/docs/intl-messageformat/) to handle dynamic content, such as variables and pluralization. Here's how to format your translations:

 #### Examples
-- **Simple variables**
+- **Simple variables**
 ```json
 {
   "greeting": "Hello, {name}!"
 }
 ```

-- **Pluralization**
+- **Pluralization**
 ```json
 {
   "items": "{count, plural, one {Work item} other {Work items}}"
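At runtime, messages in this syntax are compiled and formatted with values. A small sketch using the `intl-messageformat` package directly (the app may wrap this differently):

```typescript
import { IntlMessageFormat } from "intl-messageformat";

// The plural message from the example above.
const itemsMessage = new IntlMessageFormat(
  "{count, plural, one {Work item} other {Work items}}",
  "en"
);

console.log(itemsMessage.format({ count: 1 })); // "Work item"
console.log(itemsMessage.format({ count: 5 })); // "Work items"

// Simple variable interpolation works the same way.
const greeting = new IntlMessageFormat("Hello, {name}!", "en");
console.log(greeting.format({ name: "Plane" })); // "Hello, Plane!"
```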
@@ -142,15 +172,15 @@ We use [IntlMessageFormat](https://formatjs.github.io/docs/intl-messageformat/)
 ### Adding new languages
 Adding a new language involves several steps to ensure it integrates seamlessly with the project. Follow these instructions carefully:

-1. **Update type definitions**
+1. **Update type definitions**
 Add the new language to the TLanguage type in the language definitions file:

 ```typescript
 // types/language.ts
 export type TLanguage = "en" | "fr" | "your-lang";
-```
+```

-2. **Add language configuration**
+2. **Add language configuration**
 Include the new language in the list of supported languages:

 ```typescript
@@ -161,14 +191,14 @@ Include the new language in the list of supported languages:
 ];
 ```

-3. **Create translation files**
+3. **Create translation files**
 1. Create a new folder for your language under locales (e.g., `locales/your-lang/`).

 2. Add a `translations.json` file inside the folder.

 3. Copy the structure from an existing translation file and translate all keys.

-4. **Update import logic**
+4. **Update import logic**
 Modify the language import logic to include your new language:

 ```typescript
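The hunk ends before the import snippet. As a hedged illustration only, here is a minimal sketch of what such locale-loading logic might look like, assuming one dynamic import per language folder; the `importTranslations` helper is hypothetical and not the repository's actual code:

```typescript
import type { TLanguage } from "./types/language";

// Hypothetical loader: maps each supported language to its translations file.
export const importTranslations = async (language: TLanguage) => {
  switch (language) {
    case "fr":
      return (await import("./locales/fr/translations.json")).default;
    case "your-lang":
      return (await import("./locales/your-lang/translations.json")).default;
    case "en":
    default:
      return (await import("./locales/en/translations.json")).default;
  }
};
```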
@@ -43,9 +43,6 @@ NGINX_PORT=80
 # Debug value for api server use it as 0 for production use
 DEBUG=0
 CORS_ALLOWED_ORIGINS="http://localhost"
-# Error logs
-SENTRY_DSN=""
-SENTRY_ENVIRONMENT="development"
 # Database Settings
 POSTGRES_USER="plane"
 POSTGRES_PASSWORD="plane"
README.md: 65 changes

@@ -16,10 +16,10 @@
 </p>

 <p align="center">
-<a href="https://dub.sh/plane-website-readme"><b>Website</b></a> •
-<a href="https://git.new/releases"><b>Releases</b></a> •
-<a href="https://dub.sh/planepowershq"><b>Twitter</b></a> •
-<a href="https://dub.sh/planedocs"><b>Documentation</b></a>
+<a href="https://plane.so/"><b>Website</b></a> •
+<a href="https://github.com/makeplane/plane/releases"><b>Releases</b></a> •
+<a href="https://twitter.com/planepowers"><b>Twitter</b></a> •
+<a href="https://docs.plane.so/"><b>Documentation</b></a>
 </p>

 <p>
@@ -39,7 +39,7 @@
 </a>
 </p>

-Meet [Plane](https://dub.sh/plane-website-readme), an open-source project management tool to track issues, run ~sprints~ cycles, and manage product roadmaps without the chaos of managing the tool itself. 🧘‍♀️
+Meet [Plane](https://plane.so/), an open-source project management tool to track issues, run ~sprints~ cycles, and manage product roadmaps without the chaos of managing the tool itself. 🧘‍♀️

 > Plane is evolving every day. Your suggestions, ideas, and reported bugs help us immensely. Do not hesitate to join in the conversation on [Discord](https://discord.com/invite/A92xrEGCge) or raise a GitHub issue. We read everything and respond to most.

@@ -47,10 +47,10 @@ Meet [Plane](https://dub.sh/plane-website-readme), an open-source project manage

 Getting started with Plane is simple. Choose the setup that works best for you:

-- **Plane Cloud**
+- **Plane Cloud**
  Sign up for a free account on [Plane Cloud](https://app.plane.so)—it's the fastest way to get up and running without worrying about infrastructure.

-- **Self-host Plane**
+- **Self-host Plane**
  Prefer full control over your data and infrastructure? Install and run Plane on your own servers. Follow our detailed [deployment guides](https://developers.plane.so/self-hosting/overview) to get started.

 | Installation methods | Docs link |
@@ -62,22 +62,22 @@ Prefer full control over your data and infrastructure? Install and run Plane on

 ## 🌟 Features

-- **Issues**
+- **Issues**
  Efficiently create and manage tasks with a robust rich text editor that supports file uploads. Enhance organization and tracking by adding sub-properties and referencing related issues.

-- **Cycles**
+- **Cycles**
  Maintain your team’s momentum with Cycles. Track progress effortlessly using burn-down charts and other insightful tools.

-- **Modules**
-  Simplify complex projects by dividing them into smaller, manageable modules.
+- **Modules**
+  Simplify complex projects by dividing them into smaller, manageable modules.

-- **Views**
+- **Views**
  Customize your workflow by creating filters to display only the most relevant issues. Save and share these views with ease.

-- **Pages**
+- **Pages**
  Capture and organize ideas using Plane Pages, complete with AI capabilities and a rich text editor. Format text, insert images, add hyperlinks, or convert your notes into actionable items.

-- **Analytics**
+- **Analytics**
  Access real-time insights across all your Plane data. Visualize trends, remove blockers, and keep your projects moving forward.

 - **Drive** (_coming soon_): The drive helps you share documents, images, videos, or any other files that make sense to you or your team and align on the problem/solution.
@@ -85,38 +85,7 @@ Access real-time insights across all your Plane data. Visualize trends, remove b

 ## 🛠️ Local development

-### Pre-requisites
-- Ensure Docker Engine is installed and running.
-
-### Development setup
-Setting up your local environment is simple and straightforward. Follow these steps to get started:
-
-1. Clone the repository:
-   ```
-   git clone https://github.com/makeplane/plane.git
-   ```
-2. Navigate to the project folder:
-   ```
-   cd plane
-   ```
-3. Create a new branch for your feature or fix:
-   ```
-   git checkout -b <feature-branch-name>
-   ```
-4. Run the setup script in the terminal:
-   ```
-   ./setup.sh
-   ```
-5. Open the project in an IDE such as VS Code.
-
-6. Review the `.env` files in the relevant folders. Refer to [Environment Setup](./ENV_SETUP.md) for details on the environment variables used.
-
-7. Start the services using Docker:
-   ```
-   docker compose -f docker-compose-local.yml up -d
-   ```
-
-That’s it! You’re all set to begin coding. Remember to refresh your browser if changes don’t auto-reload. Happy contributing! 🎉
+See [CONTRIBUTING](./CONTRIBUTING.md)

 ## ⚙️ Built with
 [](https://nextjs.org/)
@@ -194,7 +163,7 @@ Feel free to ask questions, report bugs, participate in discussions, share ideas

 If you discover a security vulnerability in Plane, please report it responsibly instead of opening a public issue. We take all legitimate reports seriously and will investigate them promptly. See [Security policy](https://github.com/makeplane/plane/blob/master/SECURITY.md) for more info.

-To disclose any security issues, please email us at security@plane.so.
+To disclose any security issues, please email us at security@plane.so.

 ## 🤝 Contributing

@@ -219,4 +188,4 @@ Please read [CONTRIBUTING.md](https://github.com/makeplane/plane/blob/master/CON

 ## License
-This project is licensed under the [GNU Affero General Public License v3.0](https://github.com/makeplane/plane/blob/master/LICENSE.txt).
+This project is licensed under the [GNU Affero General Public License v3.0](https://github.com/makeplane/plane/blob/master/LICENSE.txt).
@@ -1,3 +1,12 @@
-NEXT_PUBLIC_API_BASE_URL=""
+NEXT_PUBLIC_API_BASE_URL="http://localhost:8000"
+
+NEXT_PUBLIC_WEB_BASE_URL="http://localhost:3000"
+
+NEXT_PUBLIC_ADMIN_BASE_URL="http://localhost:3001"
 NEXT_PUBLIC_ADMIN_BASE_PATH="/god-mode"
-NEXT_PUBLIC_WEB_BASE_URL=""
+
+NEXT_PUBLIC_SPACE_BASE_URL="http://localhost:3002"
+NEXT_PUBLIC_SPACE_BASE_PATH="/spaces"
+
+NEXT_PUBLIC_LIVE_BASE_URL="http://localhost:3100"
+NEXT_PUBLIC_LIVE_BASE_PATH="/live"
@@ -43,6 +43,7 @@ export const InstanceGithubConfigForm: FC<Props> = (props) => {
     defaultValues: {
       GITHUB_CLIENT_ID: config["GITHUB_CLIENT_ID"],
       GITHUB_CLIENT_SECRET: config["GITHUB_CLIENT_SECRET"],
+      GITHUB_ORGANIZATION_ID: config["GITHUB_ORGANIZATION_ID"],
     },
   });

@@ -93,6 +94,19 @@ export const InstanceGithubConfigForm: FC<Props> = (props) => {
       error: Boolean(errors.GITHUB_CLIENT_SECRET),
       required: true,
     },
+    {
+      key: "GITHUB_ORGANIZATION_ID",
+      type: "text",
+      label: "Organization ID",
+      description: (
+        <>
+          The organization github ID.
+        </>
+      ),
+      placeholder: "123456789",
+      error: Boolean(errors.GITHUB_ORGANIZATION_ID),
+      required: false,
+    },
   ];

   const GITHUB_SERVICE_FIELD: TCopyField[] = [
@@ -150,6 +164,7 @@ export const InstanceGithubConfigForm: FC<Props> = (props) => {
       reset({
         GITHUB_CLIENT_ID: response.find((item) => item.key === "GITHUB_CLIENT_ID")?.value,
         GITHUB_CLIENT_SECRET: response.find((item) => item.key === "GITHUB_CLIENT_SECRET")?.value,
+        GITHUB_ORGANIZATION_ID: response.find((item) => item.key === "GITHUB_ORGANIZATION_ID")?.value,
       });
     })
     .catch((err) => console.error(err));
@@ -3,18 +3,16 @@
 import { ReactNode } from "react";
 import { ThemeProvider, useTheme } from "next-themes";
 import { SWRConfig } from "swr";
-// ui
+// plane imports
 import { ADMIN_BASE_PATH, DEFAULT_SWR_CONFIG } from "@plane/constants";
 import { Toast } from "@plane/ui";
 import { resolveGeneralTheme } from "@plane/utils";
-// constants
-// helpers
 // lib
 import { InstanceProvider } from "@/lib/instance-provider";
 import { StoreProvider } from "@/lib/store-provider";
 import { UserProvider } from "@/lib/user-provider";
 // styles
-import "./globals.css";
+import "@/styles/globals.css";

 const ToastWithTheme = () => {
   const { resolvedTheme } = useTheme();
@@ -9,6 +9,19 @@ const nextConfig = {
     unoptimized: true,
   },
   basePath: process.env.NEXT_PUBLIC_ADMIN_BASE_PATH || "",
+  transpilePackages: [
+    "@plane/constants",
+    "@plane/editor",
+    "@plane/hooks",
+    "@plane/i18n",
+    "@plane/logger",
+    "@plane/propel",
+    "@plane/services",
+    "@plane/shared-state",
+    "@plane/types",
+    "@plane/ui",
+    "@plane/utils",
+  ],
 };

 module.exports = nextConfig;
@@ -1,7 +1,7 @@
 {
   "name": "admin",
   "description": "Admin UI for Plane",
-  "version": "0.25.2",
+  "version": "0.26.0",
   "license": "AGPL-3.0",
   "private": true,
   "scripts": {
@@ -17,11 +17,11 @@
     "@headlessui/react": "^1.7.19",
     "@plane/constants": "*",
     "@plane/hooks": "*",
     "@plane/propel": "*",
+    "@plane/services": "*",
     "@plane/types": "*",
     "@plane/ui": "*",
     "@plane/utils": "*",
-    "@plane/services": "*",
     "@sentry/nextjs": "^8.54.0",
     "@tailwindcss/typography": "^0.5.9",
     "@types/lodash": "^4.17.0",
     "autoprefixer": "10.4.14",
@@ -30,7 +30,7 @@
     "lucide-react": "^0.469.0",
     "mobx": "^6.12.0",
     "mobx-react": "^9.1.1",
-    "next": "^14.2.20",
+    "next": "^14.2.28",
     "next-themes": "^0.2.1",
     "postcss": "^8.4.38",
     "react": "^18.3.1",
@@ -1,8 +1,2 @@
-module.exports = {
-  plugins: {
-    "postcss-import": {},
-    "tailwindcss/nesting": {},
-    tailwindcss: {},
-    autoprefixer: {},
-  },
-};
+// eslint-disable-next-line @typescript-eslint/no-require-imports
+module.exports = require("@plane/tailwind-config/postcss.config.js");
@@ -1,5 +1,4 @@
-@import url("https://fonts.googleapis.com/css2?family=Inter:wght@200;300;400;500;600;700;800&display=swap");
-@import url("https://fonts.googleapis.com/css2?family=Material+Symbols+Rounded:opsz,wght,FILL,GRAD@48,400,0,0&display=swap");
+@import "@plane/propel/styles/fonts";

 @tailwind base;
 @tailwind components;
@@ -60,23 +59,31 @@
   --color-border-300: 212, 212, 212; /* strong border- 1 */
   --color-border-400: 185, 185, 185; /* strong border- 2 */

-  --color-shadow-2xs: 0px 0px 1px 0px rgba(23, 23, 23, 0.06), 0px 1px 2px 0px rgba(23, 23, 23, 0.06),
+  --color-shadow-2xs:
+    0px 0px 1px 0px rgba(23, 23, 23, 0.06), 0px 1px 2px 0px rgba(23, 23, 23, 0.06),
     0px 1px 2px 0px rgba(23, 23, 23, 0.14);
-  --color-shadow-xs: 0px 1px 2px 0px rgba(0, 0, 0, 0.16), 0px 2px 4px 0px rgba(16, 24, 40, 0.12),
+  --color-shadow-xs:
+    0px 1px 2px 0px rgba(0, 0, 0, 0.16), 0px 2px 4px 0px rgba(16, 24, 40, 0.12),
     0px 1px 8px -1px rgba(16, 24, 40, 0.1);
-  --color-shadow-sm: 0px 1px 4px 0px rgba(0, 0, 0, 0.01), 0px 4px 8px 0px rgba(0, 0, 0, 0.02),
-    0px 1px 12px 0px rgba(0, 0, 0, 0.12);
-  --color-shadow-rg: 0px 3px 6px 0px rgba(0, 0, 0, 0.1), 0px 4px 4px 0px rgba(16, 24, 40, 0.08),
+  --color-shadow-sm:
+    0px 1px 4px 0px rgba(0, 0, 0, 0.01), 0px 4px 8px 0px rgba(0, 0, 0, 0.02), 0px 1px 12px 0px rgba(0, 0, 0, 0.12);
+  --color-shadow-rg:
+    0px 3px 6px 0px rgba(0, 0, 0, 0.1), 0px 4px 4px 0px rgba(16, 24, 40, 0.08),
     0px 1px 12px 0px rgba(16, 24, 40, 0.04);
-  --color-shadow-md: 0px 4px 8px 0px rgba(0, 0, 0, 0.12), 0px 6px 12px 0px rgba(16, 24, 40, 0.12),
+  --color-shadow-md:
+    0px 4px 8px 0px rgba(0, 0, 0, 0.12), 0px 6px 12px 0px rgba(16, 24, 40, 0.12),
     0px 1px 16px 0px rgba(16, 24, 40, 0.12);
-  --color-shadow-lg: 0px 6px 12px 0px rgba(0, 0, 0, 0.12), 0px 8px 16px 0px rgba(0, 0, 0, 0.12),
+  --color-shadow-lg:
+    0px 6px 12px 0px rgba(0, 0, 0, 0.12), 0px 8px 16px 0px rgba(0, 0, 0, 0.12),
     0px 1px 24px 0px rgba(16, 24, 40, 0.12);
-  --color-shadow-xl: 0px 0px 18px 0px rgba(0, 0, 0, 0.16), 0px 0px 24px 0px rgba(16, 24, 40, 0.16),
+  --color-shadow-xl:
+    0px 0px 18px 0px rgba(0, 0, 0, 0.16), 0px 0px 24px 0px rgba(16, 24, 40, 0.16),
     0px 0px 52px 0px rgba(16, 24, 40, 0.16);
-  --color-shadow-2xl: 0px 8px 16px 0px rgba(0, 0, 0, 0.12), 0px 12px 24px 0px rgba(16, 24, 40, 0.12),
+  --color-shadow-2xl:
+    0px 8px 16px 0px rgba(0, 0, 0, 0.12), 0px 12px 24px 0px rgba(16, 24, 40, 0.12),
     0px 1px 32px 0px rgba(16, 24, 40, 0.12);
-  --color-shadow-3xl: 0px 12px 24px 0px rgba(0, 0, 0, 0.12), 0px 16px 32px 0px rgba(0, 0, 0, 0.12),
+  --color-shadow-3xl:
+    0px 12px 24px 0px rgba(0, 0, 0, 0.12), 0px 16px 32px 0px rgba(0, 0, 0, 0.12),
     0px 1px 48px 0px rgba(16, 24, 40, 0.12);
   --color-shadow-4xl: 0px 8px 40px 0px rgba(0, 0, 61, 0.05), 0px 12px 32px -16px rgba(0, 0, 0, 0.05);
@@ -6,7 +6,8 @@
     "paths": {
       "@/*": ["core/*"],
       "@/public/*": ["public/*"],
-      "@/plane-admin/*": ["ce/*"]
+      "@/plane-admin/*": ["ce/*"],
+      "@/styles/*": ["styles/*"]
     }
   },
   "include": ["next-env.d.ts", "next.config.js", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
@@ -145,11 +145,8 @@ RUN chmod +x /app/pg-setup.sh
 # APPLICATION ENVIRONMENT SETTINGS
 # *****************************************************************************
 ENV APP_DOMAIN=localhost

 ENV WEB_URL=http://${APP_DOMAIN}
 ENV DEBUG=0
-ENV SENTRY_DSN=
-ENV SENTRY_ENVIRONMENT=production
 ENV CORS_ALLOWED_ORIGINS=http://${APP_DOMAIN},https://${APP_DOMAIN}
 # Secret Key
 ENV SECRET_KEY=60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5
@@ -1,11 +1,7 @@
 # Backend
 # Debug value for api server use it as 0 for production use
 DEBUG=0
-CORS_ALLOWED_ORIGINS="http://localhost"
-
-# Error logs
-SENTRY_DSN=""
-SENTRY_ENVIRONMENT="development"
+CORS_ALLOWED_ORIGINS="http://localhost:3000,http://localhost:3001,http://localhost:3002,http://localhost:3100"

 # Database Settings
 POSTGRES_USER="plane"
@@ -31,7 +27,7 @@ RABBITMQ_VHOST="plane"
 AWS_REGION=""
 AWS_ACCESS_KEY_ID="access-key"
 AWS_SECRET_ACCESS_KEY="secret-key"
-AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
+AWS_S3_ENDPOINT_URL="http://localhost:9000"
 # Changing this requires change in the nginx.conf for uploads if using minio setup
 AWS_S3_BUCKET_NAME="uploads"
 # Maximum file upload limit
@@ -41,22 +37,31 @@ FILE_SIZE_LIMIT=5242880
 DOCKERIZED=1 # deprecated

 # set to 1 If using the pre-configured minio setup
-USE_MINIO=1
+USE_MINIO=0

 # Nginx Configuration
 NGINX_PORT=80

 # Email redirections and minio domain settings
-WEB_URL="http://localhost"
+WEB_URL="http://localhost:8000"

 # Gunicorn Workers
 GUNICORN_WORKERS=2

 # Base URLs
-ADMIN_BASE_URL=
-SPACE_BASE_URL=
-APP_BASE_URL=
+ADMIN_BASE_URL="http://localhost:3001"
+ADMIN_BASE_PATH="/god-mode"
+
+SPACE_BASE_URL="http://localhost:3002"
+SPACE_BASE_PATH="/spaces"
+
+APP_BASE_URL="http://localhost:3000"
+APP_BASE_PATH=""
+
+LIVE_BASE_URL="http://localhost:3100"
+LIVE_BASE_PATH="/live"
+
+LIVE_SERVER_SECRET_KEY="secret-key"

 # Hard delete files after days
 HARD_DELETE_AFTER_DAYS=60
@@ -1,6 +1,6 @@
 {
   "name": "plane-api",
-  "version": "0.25.2",
+  "version": "0.26.0",
   "license": "AGPL-3.0",
   "private": true,
   "description": "API server powering Plane's backend"
@@ -1,4 +1,5 @@
 # Third party imports
+import pytz
 from rest_framework import serializers

 # Module imports
@@ -18,6 +19,14 @@ class CycleSerializer(BaseSerializer):
     completed_estimates = serializers.FloatField(read_only=True)
     started_estimates = serializers.FloatField(read_only=True)

+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        project = self.context.get("project")
+        if project and project.timezone:
+            project_timezone = pytz.timezone(project.timezone)
+            self.fields["start_date"].timezone = project_timezone
+            self.fields["end_date"].timezone = project_timezone
+
     def validate(self, data):
         if (
             data.get("start_date", None) is not None
@@ -30,12 +39,15 @@ class CycleSerializer(BaseSerializer):
             data.get("start_date", None) is not None
             and data.get("end_date", None) is not None
         ):
-            project_id = self.initial_data.get("project_id") or self.instance.project_id
-            is_start_date_end_date_equal = (
-                True
-                if str(data.get("start_date")) == str(data.get("end_date"))
-                else False
-            )
+            project_id = self.initial_data.get("project_id") or (
+                self.instance.project_id
+                if self.instance and hasattr(self.instance, "project_id")
+                else None
+            )
+
+            if not project_id:
+                raise serializers.ValidationError("Project ID is required")

             data["start_date"] = convert_to_utc(
                 date=str(data.get("start_date").date()),
                 project_id=project_id,
@@ -44,7 +56,6 @@ class CycleSerializer(BaseSerializer):
             data["end_date"] = convert_to_utc(
                 date=str(data.get("end_date", None).date()),
                 project_id=project_id,
-                is_start_date_end_date_equal=is_start_date_end_date_equal,
             )
         return data
@@ -16,7 +16,6 @@ class ProjectSerializer(BaseSerializer):
     member_role = serializers.IntegerField(read_only=True)
     is_deployed = serializers.BooleanField(read_only=True)
     cover_image_url = serializers.CharField(read_only=True)
-    inbox_view = serializers.BooleanField(read_only=True, source="intake_view")

     class Meta:
         model = Project
@@ -4,16 +4,6 @@ from plane.api.views import IntakeIssueAPIEndpoint

 urlpatterns = [
-    path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/",
-        IntakeIssueAPIEndpoint.as_view(),
-        name="inbox-issue",
-    ),
-    path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/<uuid:issue_id>/",
-        IntakeIssueAPIEndpoint.as_view(),
-        name="inbox-issue",
-    ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/intake-issues/",
         IntakeIssueAPIEndpoint.as_view(),
@@ -39,7 +39,7 @@ from plane.db.models import (
     UserFavorite,
 )
 from plane.utils.analytics_plot import burndown_plot
-
+from plane.utils.host import base_host
 from .base import BaseAPIView
 from plane.bgtasks.webhook_task import model_activity

@@ -137,10 +137,14 @@ class CycleAPIEndpoint(BaseAPIView):
     )

     def get(self, request, slug, project_id, pk=None):
+        project = Project.objects.get(workspace__slug=slug, pk=project_id)
         if pk:
             queryset = self.get_queryset().filter(archived_at__isnull=True).get(pk=pk)
             data = CycleSerializer(
-                queryset, fields=self.fields, expand=self.expand
+                queryset,
+                fields=self.fields,
+                expand=self.expand,
+                context={"project": project},
             ).data
             return Response(data, status=status.HTTP_200_OK)
         queryset = self.get_queryset().filter(archived_at__isnull=True)
@@ -152,7 +156,11 @@ class CycleAPIEndpoint(BaseAPIView):
             start_date__lte=timezone.now(), end_date__gte=timezone.now()
         )
         data = CycleSerializer(
-            queryset, many=True, fields=self.fields, expand=self.expand
+            queryset,
+            many=True,
+            fields=self.fields,
+            expand=self.expand,
+            context={"project": project},
         ).data
         return Response(data, status=status.HTTP_200_OK)
@@ -163,7 +171,11 @@ class CycleAPIEndpoint(BaseAPIView):
             request=request,
             queryset=(queryset),
             on_results=lambda cycles: CycleSerializer(
-                cycles, many=True, fields=self.fields, expand=self.expand
+                cycles,
+                many=True,
+                fields=self.fields,
+                expand=self.expand,
+                context={"project": project},
             ).data,
         )
@@ -174,7 +186,11 @@ class CycleAPIEndpoint(BaseAPIView):
             request=request,
             queryset=(queryset),
             on_results=lambda cycles: CycleSerializer(
-                cycles, many=True, fields=self.fields, expand=self.expand
+                cycles,
+                many=True,
+                fields=self.fields,
+                expand=self.expand,
+                context={"project": project},
             ).data,
         )
@@ -185,7 +201,11 @@ class CycleAPIEndpoint(BaseAPIView):
             request=request,
             queryset=(queryset),
             on_results=lambda cycles: CycleSerializer(
-                cycles, many=True, fields=self.fields, expand=self.expand
+                cycles,
+                many=True,
+                fields=self.fields,
+                expand=self.expand,
+                context={"project": project},
             ).data,
         )
@@ -198,14 +218,22 @@ class CycleAPIEndpoint(BaseAPIView):
             request=request,
             queryset=(queryset),
             on_results=lambda cycles: CycleSerializer(
-                cycles, many=True, fields=self.fields, expand=self.expand
+                cycles,
+                many=True,
+                fields=self.fields,
+                expand=self.expand,
+                context={"project": project},
             ).data,
         )
         return self.paginate(
             request=request,
             queryset=(queryset),
             on_results=lambda cycles: CycleSerializer(
-                cycles, many=True, fields=self.fields, expand=self.expand
+                cycles,
+                many=True,
+                fields=self.fields,
+                expand=self.expand,
+                context={"project": project},
             ).data,
         )
@@ -251,7 +279,7 @@ class CycleAPIEndpoint(BaseAPIView):
             current_instance=None,
             actor_id=request.user.id,
             slug=slug,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
         return Response(serializer.data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -323,7 +351,7 @@ class CycleAPIEndpoint(BaseAPIView):
             current_instance=current_instance,
             actor_id=request.user.id,
             slug=slug,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
         return Response(serializer.data, status=status.HTTP_200_OK)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -694,7 +722,7 @@ class CycleIssueAPIEndpoint(BaseAPIView):
             ),
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
         # Return all Cycle Issues
         return Response(
@@ -760,6 +788,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
                     issue_cycle__issue__archived_at__isnull=True,
                     issue_cycle__issue__is_draft=False,
                     issue_cycle__deleted_at__isnull=True,
+                    issue_cycle__issue__deleted_at__isnull=True,
                 ),
             )
         )
@@ -771,6 +800,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
                     issue_cycle__issue__archived_at__isnull=True,
                     issue_cycle__issue__is_draft=False,
                     issue_cycle__deleted_at__isnull=True,
+                    issue_cycle__issue__deleted_at__isnull=True,
                 ),
             )
         )
@@ -819,6 +849,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
             )
             )
         )
+        old_cycle = old_cycle.first()

         estimate_type = Project.objects.filter(
             workspace__slug=slug,
@@ -938,7 +969,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
             )

             estimate_completion_chart = burndown_plot(
-                queryset=old_cycle.first(),
+                queryset=old_cycle,
                 slug=slug,
                 project_id=project_id,
                 plot_type="points",
@@ -1086,7 +1117,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):

         # Pass the new_cycle queryset to burndown_plot
         completion_chart = burndown_plot(
-            queryset=old_cycle.first(),
+            queryset=old_cycle,
             slug=slug,
             project_id=project_id,
             plot_type="issues",
@@ -1098,12 +1129,12 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
         ).first()

         current_cycle.progress_snapshot = {
-            "total_issues": old_cycle.first().total_issues,
-            "completed_issues": old_cycle.first().completed_issues,
-            "cancelled_issues": old_cycle.first().cancelled_issues,
-            "started_issues": old_cycle.first().started_issues,
-            "unstarted_issues": old_cycle.first().unstarted_issues,
-            "backlog_issues": old_cycle.first().backlog_issues,
+            "total_issues": old_cycle.total_issues,
+            "completed_issues": old_cycle.completed_issues,
+            "cancelled_issues": old_cycle.cancelled_issues,
+            "started_issues": old_cycle.started_issues,
+            "unstarted_issues": old_cycle.unstarted_issues,
+            "backlog_issues": old_cycle.backlog_issues,
             "distribution": {
                 "labels": label_distribution_data,
                 "assignees": assignee_distribution_data,
@@ -1168,7 +1199,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
             ),
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )

         return Response({"message": "Success"}, status=status.HTTP_200_OK)
@@ -18,8 +18,9 @@ from plane.api.serializers import IntakeIssueSerializer, IssueSerializer
 from plane.app.permissions import ProjectLitePermission
 from plane.bgtasks.issue_activities_task import issue_activity
 from plane.db.models import Intake, IntakeIssue, Issue, Project, ProjectMember, State
-
+from plane.utils.host import base_host
 from .base import BaseAPIView
+from plane.db.models.intake import SourceType


 class IntakeIssueAPIEndpoint(BaseAPIView):
@@ -125,7 +126,7 @@ class IntakeIssueAPIEndpoint(BaseAPIView):
             intake_id=intake.id,
             project_id=project_id,
             issue=issue,
-            source=request.data.get("source", "IN-APP"),
+            source=SourceType.IN_APP,
         )
         # Create an Issue Activity
         issue_activity.delay(
@@ -297,7 +298,7 @@ class IntakeIssueAPIEndpoint(BaseAPIView):
             current_instance=current_instance,
             epoch=int(timezone.now().timestamp()),
             notification=False,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
             intake=str(intake_issue.id),
         )
@@ -56,6 +56,8 @@ from plane.db.models import (
 from plane.settings.storage import S3Storage
 from plane.bgtasks.storage_metadata_task import get_asset_object_metadata
 from .base import BaseAPIView
+from plane.utils.host import base_host
+from plane.bgtasks.webhook_task import model_activity


 class WorkspaceIssueAPIEndpoint(BaseAPIView):
@@ -321,6 +323,17 @@ class IssueAPIEndpoint(BaseAPIView):
             current_instance=None,
             epoch=int(timezone.now().timestamp()),
         )
+
+        # Send the model activity
+        model_activity.delay(
+            model_name="issue",
+            model_id=str(serializer.data["id"]),
+            requested_data=request.data,
+            current_instance=None,
+            actor_id=request.user.id,
+            slug=slug,
+            origin=base_host(request=request, is_app=True),
+        )
         return Response(serializer.data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

@@ -1048,7 +1061,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
             current_instance=None,
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )

         # Get the storage metadata
@@ -1108,7 +1121,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
             current_instance=json.dumps(serializer.data, cls=DjangoJSONEncoder),
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )

         # Update the attachment
@@ -33,6 +33,7 @@ from plane.db.models import (

 from .base import BaseAPIView
 from plane.bgtasks.webhook_task import model_activity
+from plane.utils.host import base_host


 class ModuleAPIEndpoint(BaseAPIView):
@@ -174,7 +175,7 @@ class ModuleAPIEndpoint(BaseAPIView):
             current_instance=None,
             actor_id=request.user.id,
             slug=slug,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
         module = Module.objects.get(pk=serializer.data["id"])
         serializer = ModuleSerializer(module)
@@ -226,7 +227,7 @@ class ModuleAPIEndpoint(BaseAPIView):
             current_instance=current_instance,
             actor_id=request.user.id,
             slug=slug,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )

         return Response(serializer.data, status=status.HTTP_200_OK)
@@ -280,6 +281,7 @@ class ModuleAPIEndpoint(BaseAPIView):
             project_id=str(project_id),
             current_instance=json.dumps({"module_name": str(module.name)}),
             epoch=int(timezone.now().timestamp()),
+            origin=base_host(request=request, is_app=True),
         )
         module.delete()
         # Delete the module issues
@@ -449,6 +451,7 @@ class ModuleIssueAPIEndpoint(BaseAPIView):
                 }
             ),
             epoch=int(timezone.now().timestamp()),
+            origin=base_host(request=request, is_app=True),
         )

         return Response(
@@ -30,6 +30,7 @@ from plane.db.models import (
 )
 from plane.bgtasks.webhook_task import model_activity, webhook_activity
 from .base import BaseAPIView
+from plane.utils.host import base_host


 class ProjectAPIEndpoint(BaseAPIView):
@@ -171,14 +172,14 @@ class ProjectAPIEndpoint(BaseAPIView):
             states = [
                 {
                     "name": "Backlog",
-                    "color": "#A3A3A3",
+                    "color": "#60646C",
                     "sequence": 15000,
                     "group": "backlog",
                     "default": True,
                 },
                 {
                     "name": "Todo",
-                    "color": "#3A3A3A",
+                    "color": "#60646C",
                     "sequence": 25000,
                     "group": "unstarted",
                 },
@@ -190,13 +191,13 @@ class ProjectAPIEndpoint(BaseAPIView):
                 },
                 {
                     "name": "Done",
-                    "color": "#16A34A",
+                    "color": "#46A758",
                     "sequence": 45000,
                     "group": "completed",
                 },
                 {
                     "name": "Cancelled",
-                    "color": "#EF4444",
+                    "color": "#9AA4BC",
                     "sequence": 55000,
                     "group": "cancelled",
                 },
@@ -228,7 +229,7 @@ class ProjectAPIEndpoint(BaseAPIView):
                 current_instance=None,
                 actor_id=request.user.id,
                 slug=slug,
-                origin=request.META.get("HTTP_ORIGIN"),
+                origin=base_host(request=request, is_app=True),
             )

             serializer = ProjectSerializer(project)
@@ -238,7 +239,7 @@ class ProjectAPIEndpoint(BaseAPIView):
             if "already exists" in str(e):
                 return Response(
                     {"name": "The project name is already taken"},
-                    status=status.HTTP_410_GONE,
+                    status=status.HTTP_409_CONFLICT,
                 )
         except Workspace.DoesNotExist:
             return Response(
@@ -247,7 +248,7 @@ class ProjectAPIEndpoint(BaseAPIView):
         except ValidationError:
             return Response(
                 {"identifier": "The project identifier is already taken"},
-                status=status.HTTP_410_GONE,
+                status=status.HTTP_409_CONFLICT,
             )

     def patch(self, request, slug, pk):
@@ -258,9 +259,7 @@ class ProjectAPIEndpoint(BaseAPIView):
                 ProjectSerializer(project).data, cls=DjangoJSONEncoder
             )

-            intake_view = request.data.get(
-                "inbox_view", request.data.get("intake_view", project.intake_view)
-            )
+            intake_view = request.data.get("intake_view", project.intake_view)

             if project.archived_at:
                 return Response(
@@ -297,7 +296,7 @@ class ProjectAPIEndpoint(BaseAPIView):
                 current_instance=current_instance,
                 actor_id=request.user.id,
                 slug=slug,
-                origin=request.META.get("HTTP_ORIGIN"),
+                origin=base_host(request=request, is_app=True),
             )

             serializer = ProjectSerializer(project)
@@ -307,7 +306,7 @@ class ProjectAPIEndpoint(BaseAPIView):
             if "already exists" in str(e):
                 return Response(
                     {"name": "The project name is already taken"},
-                    status=status.HTTP_410_GONE,
+                    status=status.HTTP_409_CONFLICT,
                 )
         except (Project.DoesNotExist, Workspace.DoesNotExist):
             return Response(
@@ -316,7 +315,7 @@ class ProjectAPIEndpoint(BaseAPIView):
         except ValidationError:
             return Response(
                 {"identifier": "The project identifier is already taken"},
-                status=status.HTTP_410_GONE,
+                status=status.HTTP_409_CONFLICT,
             )

     def delete(self, request, slug, pk):
@@ -334,7 +333,7 @@ class ProjectAPIEndpoint(BaseAPIView):
             new_value=None,
             actor_id=request.user.id,
             slug=slug,
-            current_site=request.META.get("HTTP_ORIGIN"),
+            current_site=base_host(request=request, is_app=True),
             event_id=project.id,
             old_identifier=None,
             new_identifier=None,
@@ -1,5 +1,7 @@
 from .base import BaseSerializer
 from plane.db.models import APIToken, APIActivityLog
+from rest_framework import serializers
+from django.utils import timezone


 class APITokenSerializer(BaseSerializer):
@@ -17,10 +19,17 @@ class APITokenSerializer(BaseSerializer):


 class APITokenReadSerializer(BaseSerializer):
+    is_active = serializers.SerializerMethodField()
+
     class Meta:
         model = APIToken
         exclude = ("token",)

+    def get_is_active(self, obj: APIToken) -> bool:
+        if obj.expired_at is None:
+            return True
+        return timezone.now() < obj.expired_at
+

 class APIActivityLogSerializer(BaseSerializer):
     class Meta:
@@ -25,11 +25,6 @@ class CycleWriteSerializer(BaseSerializer):
             or (self.instance and self.instance.project_id)
             or self.context.get("project_id", None)
         )
-        is_start_date_end_date_equal = (
-            True
-            if str(data.get("start_date")) == str(data.get("end_date"))
-            else False
-        )
         data["start_date"] = convert_to_utc(
             date=str(data.get("start_date").date()),
             project_id=project_id,
@@ -38,7 +33,6 @@ class CycleWriteSerializer(BaseSerializer):
         data["end_date"] = convert_to_utc(
             date=str(data.get("end_date", None).date()),
             project_id=project_id,
-            is_start_date_end_date_equal=is_start_date_end_date_equal,
         )
         return data
@@ -268,6 +268,20 @@ class IssueActivitySerializer(BaseSerializer):
     issue_detail = IssueFlatSerializer(read_only=True, source="issue")
     project_detail = ProjectLiteSerializer(read_only=True, source="project")
     workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
+    source_data = serializers.SerializerMethodField()
+
+    def get_source_data(self, obj):
+        if (
+            hasattr(obj, "issue")
+            and hasattr(obj.issue, "source_data")
+            and obj.issue.source_data
+        ):
+            return {
+                "source": obj.issue.source_data[0].source,
+                "source_email": obj.issue.source_data[0].source_email,
+                "extra": obj.issue.source_data[0].extra,
+            }
+        return None

     class Meta:
         model = IssueActivity
@@ -338,8 +352,19 @@ class IssueRelationSerializer(BaseSerializer):
             "state_id",
             "priority",
             "assignee_ids",
+            "created_by",
+            "created_at",
+            "updated_at",
+            "updated_by",
         ]
-        read_only_fields = ["workspace", "project"]
+        read_only_fields = [
+            "workspace",
+            "project",
+            "created_by",
+            "created_at",
+            "updated_by",
+            "updated_at",
+        ]


 class RelatedIssueSerializer(BaseSerializer):
@@ -369,8 +394,19 @@ class RelatedIssueSerializer(BaseSerializer):
             "state_id",
             "priority",
             "assignee_ids",
+            "created_by",
+            "created_at",
+            "updated_by",
+            "updated_at",
         ]
-        read_only_fields = ["workspace", "project"]
+        read_only_fields = [
+            "workspace",
+            "project",
+            "created_by",
+            "created_at",
+            "updated_by",
+            "updated_at",
+        ]


 class IssueAssigneeSerializer(BaseSerializer):
@@ -1,11 +1,13 @@
 # Module imports
 from .base import BaseSerializer

+from rest_framework import serializers
+
 from plane.db.models import State


 class StateSerializer(BaseSerializer):
+    order = serializers.FloatField(required=False)
+
     class Meta:
         model = State
         fields = [
@@ -18,6 +20,7 @@ class StateSerializer(BaseSerializer):
             "default",
             "description",
             "sequence",
+            "order",
         ]
         read_only_fields = ["workspace", "project"]
@@ -6,6 +6,9 @@ from plane.app.views import (
     AnalyticViewViewset,
     SavedAnalyticEndpoint,
     ExportAnalyticsEndpoint,
+    AdvanceAnalyticsEndpoint,
+    AdvanceAnalyticsStatsEndpoint,
+    AdvanceAnalyticsChartEndpoint,
     DefaultAnalyticsEndpoint,
     ProjectStatsEndpoint,
 )
@@ -49,4 +52,19 @@ urlpatterns = [
         ProjectStatsEndpoint.as_view(),
         name="project-analytics",
     ),
+    path(
+        "workspaces/<str:slug>/advance-analytics/",
+        AdvanceAnalyticsEndpoint.as_view(),
+        name="advance-analytics",
+    ),
+    path(
+        "workspaces/<str:slug>/advance-analytics-stats/",
+        AdvanceAnalyticsStatsEndpoint.as_view(),
+        name="advance-analytics-stats",
+    ),
+    path(
+        "workspaces/<str:slug>/advance-analytics-charts/",
+        AdvanceAnalyticsChartEndpoint.as_view(),
+        name="advance-analytics-chart",
+    ),
 ]
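A hedged usage sketch for the three routes registered above. The host, workspace slug, and auth header are placeholders; the paths and the `tab`/`type` query params come from the views further down in this diff, while the `date_filter` value is purely illustrative.

# Hypothetical client calls against the new advance-analytics endpoints.
import requests

base_url = "https://plane.example.com/api"  # placeholder host
session = requests.Session()
session.headers["Authorization"] = "Bearer <api-token>"  # placeholder token

overview = session.get(
    f"{base_url}/workspaces/my-workspace/advance-analytics/",
    params={"tab": "overview"},
).json()

chart = session.get(
    f"{base_url}/workspaces/my-workspace/advance-analytics-charts/",
    params={"type": "projects", "date_filter": "last_30_days"},  # illustrative value
).json()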
@@ -1,7 +1,11 @@
 from django.urls import path


-from plane.app.views import IntakeViewSet, IntakeIssueViewSet
+from plane.app.views import (
+    IntakeViewSet,
+    IntakeIssueViewSet,
+    IntakeWorkItemDescriptionVersionEndpoint,
+)


 urlpatterns = [
@@ -53,4 +57,14 @@ urlpatterns = [
         ),
         name="inbox-issue",
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/intake-work-items/<uuid:work_item_id>/description-versions/",
+        IntakeWorkItemDescriptionVersionEndpoint.as_view(),
+        name="intake-work-item-versions",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/intake-work-items/<uuid:work_item_id>/description-versions/<uuid:pk>/",
+        IntakeWorkItemDescriptionVersionEndpoint.as_view(),
+        name="intake-work-item-versions",
+    ),
 ]
@@ -25,7 +25,7 @@ from plane.app.views import (
     IssueAttachmentV2Endpoint,
     IssueBulkUpdateDateEndpoint,
     IssueVersionEndpoint,
-    IssueDescriptionVersionEndpoint,
+    WorkItemDescriptionVersionEndpoint,
     IssueMetaEndpoint,
     IssueDetailIdentifierEndpoint,
 )
@@ -263,22 +263,22 @@ urlpatterns = [
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/versions/",
         IssueVersionEndpoint.as_view(),
-        name="page-versions",
+        name="issue-versions",
     ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/versions/<uuid:pk>/",
         IssueVersionEndpoint.as_view(),
-        name="page-versions",
+        name="issue-versions",
     ),
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/description-versions/",
-        IssueDescriptionVersionEndpoint.as_view(),
-        name="page-versions",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/work-items/<uuid:work_item_id>/description-versions/",
+        WorkItemDescriptionVersionEndpoint.as_view(),
+        name="work-item-versions",
     ),
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/description-versions/<uuid:pk>/",
-        IssueDescriptionVersionEndpoint.as_view(),
-        name="page-versions",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/work-items/<uuid:work_item_id>/description-versions/<uuid:pk>/",
+        WorkItemDescriptionVersionEndpoint.as_view(),
+        name="work-item-versions",
     ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/meta/",
@@ -144,7 +144,7 @@ from .issue.sub_issue import SubIssuesEndpoint

 from .issue.subscriber import IssueSubscriberViewSet

-from .issue.version import IssueVersionEndpoint, IssueDescriptionVersionEndpoint
+from .issue.version import IssueVersionEndpoint, WorkItemDescriptionVersionEndpoint

 from .module.base import (
     ModuleViewSet,
@@ -184,7 +184,11 @@ from .estimate.base import (
     EstimatePointEndpoint,
 )

-from .intake.base import IntakeViewSet, IntakeIssueViewSet
+from .intake.base import (
+    IntakeViewSet,
+    IntakeIssueViewSet,
+    IntakeWorkItemDescriptionVersionEndpoint,
+)

 from .analytic.base import (
     AnalyticsEndpoint,
@@ -195,6 +199,12 @@ from .analytic.base import (
     ProjectStatsEndpoint,
 )

+from .analytic.advance import (
+    AdvanceAnalyticsEndpoint,
+    AdvanceAnalyticsStatsEndpoint,
+    AdvanceAnalyticsChartEndpoint,
+)
+
 from .notification.base import (
     NotificationViewSet,
     UnreadNotificationEndpoint,
apiserver/plane/app/views/analytic/advance.py (new file, 571 lines)
@@ -0,0 +1,571 @@
from rest_framework.response import Response
from rest_framework import status
from typing import Dict, List, Any
from django.db.models import QuerySet, Q, Count
from django.http import HttpRequest
from django.db.models.functions import TruncMonth
from django.utils import timezone
from datetime import timedelta
from plane.app.views.base import BaseAPIView
from plane.app.permissions import ROLE, allow_permission
from plane.db.models import (
    WorkspaceMember,
    Project,
    Issue,
    Cycle,
    Module,
    IssueView,
    ProjectPage,
    Workspace,
    CycleIssue,
    ModuleIssue,
)
from django.db import models
from django.db.models import F, Case, When, Value
from django.db.models.functions import Concat
from plane.utils.build_chart import build_analytics_chart
from plane.utils.date_utils import (
    get_analytics_filters,
)


class AdvanceAnalyticsBaseView(BaseAPIView):
    def initialize_workspace(self, slug: str, type: str) -> None:
        self._workspace_slug = slug
        self.filters = get_analytics_filters(
            slug=slug,
            type=type,
            user=self.request.user,
            date_filter=self.request.GET.get("date_filter", None),
            project_ids=self.request.GET.get("project_ids", None),
        )


class AdvanceAnalyticsEndpoint(AdvanceAnalyticsBaseView):
    def get_filtered_counts(self, queryset: QuerySet) -> Dict[str, int]:
        def get_filtered_count() -> int:
            if self.filters["analytics_date_range"]:
                return queryset.filter(
                    created_at__gte=self.filters["analytics_date_range"]["current"]["gte"],
                    created_at__lte=self.filters["analytics_date_range"]["current"]["lte"],
                ).count()
            return queryset.count()

        def get_previous_count() -> int:
            if self.filters["analytics_date_range"] and self.filters[
                "analytics_date_range"
            ].get("previous"):
                return queryset.filter(
                    created_at__gte=self.filters["analytics_date_range"]["previous"]["gte"],
                    created_at__lte=self.filters["analytics_date_range"]["previous"]["lte"],
                ).count()
            return 0

        return {
            "count": get_filtered_count(),
            # "filter_count": get_previous_count(),
        }

    def get_overview_data(self) -> Dict[str, Dict[str, int]]:
        return {
            "total_users": self.get_filtered_counts(
                WorkspaceMember.objects.filter(
                    workspace__slug=self._workspace_slug, is_active=True
                )
            ),
            "total_admins": self.get_filtered_counts(
                WorkspaceMember.objects.filter(
                    workspace__slug=self._workspace_slug,
                    role=ROLE.ADMIN.value,
                    is_active=True,
                )
            ),
            "total_members": self.get_filtered_counts(
                WorkspaceMember.objects.filter(
                    workspace__slug=self._workspace_slug,
                    role=ROLE.MEMBER.value,
                    is_active=True,
                )
            ),
            "total_guests": self.get_filtered_counts(
                WorkspaceMember.objects.filter(
                    workspace__slug=self._workspace_slug,
                    role=ROLE.GUEST.value,
                    is_active=True,
                )
            ),
            "total_projects": self.get_filtered_counts(
                Project.objects.filter(**self.filters["project_filters"])
            ),
            "total_work_items": self.get_filtered_counts(
                Issue.issue_objects.filter(**self.filters["base_filters"])
            ),
            "total_cycles": self.get_filtered_counts(
                Cycle.objects.filter(**self.filters["base_filters"])
            ),
            "total_intake": self.get_filtered_counts(
                Issue.objects.filter(**self.filters["base_filters"]).filter(
                    issue_intake__isnull=False
                )
            ),
        }

    def get_work_items_stats(
        self, cycle_id=None, module_id=None
    ) -> Dict[str, Dict[str, int]]:
        """
        Returns work item stats for the workspace, or filtered by cycle_id or module_id if provided.
        """
        base_queryset = None
        if cycle_id is not None:
            cycle_issues = CycleIssue.objects.filter(
                **self.filters["base_filters"], cycle_id=cycle_id
            ).values_list("issue_id", flat=True)
            base_queryset = Issue.issue_objects.filter(id__in=cycle_issues)
        elif module_id is not None:
            module_issues = ModuleIssue.objects.filter(
                **self.filters["base_filters"], module_id=module_id
            ).values_list("issue_id", flat=True)
            base_queryset = Issue.issue_objects.filter(id__in=module_issues)
        else:
            base_queryset = Issue.issue_objects.filter(**self.filters["base_filters"])

        return {
            "total_work_items": self.get_filtered_counts(base_queryset),
            "started_work_items": self.get_filtered_counts(
                base_queryset.filter(state__group="started")
            ),
            "backlog_work_items": self.get_filtered_counts(
                base_queryset.filter(state__group="backlog")
            ),
            "un_started_work_items": self.get_filtered_counts(
                base_queryset.filter(state__group="unstarted")
            ),
            "completed_work_items": self.get_filtered_counts(
                base_queryset.filter(state__group="completed")
            ),
        }

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE")
    def get(self, request: HttpRequest, slug: str) -> Response:
        self.initialize_workspace(slug, type="analytics")
        tab = request.GET.get("tab", "overview")

        if tab == "overview":
            return Response(
                self.get_overview_data(),
                status=status.HTTP_200_OK,
            )
        elif tab == "work-items":
            # Optionally accept cycle_id or module_id as query params
            cycle_id = request.GET.get("cycle_id", None)
            module_id = request.GET.get("module_id", None)
            return Response(
                self.get_work_items_stats(cycle_id=cycle_id, module_id=module_id),
                status=status.HTTP_200_OK,
            )
        return Response({"message": "Invalid tab"}, status=status.HTTP_400_BAD_REQUEST)


class AdvanceAnalyticsStatsEndpoint(AdvanceAnalyticsBaseView):
    def get_project_issues_stats(self) -> QuerySet:
        # Get the base queryset with workspace and project filters
        base_queryset = Issue.issue_objects.filter(**self.filters["base_filters"])

        # Apply date range filter if available
        if self.filters["chart_period_range"]:
            start_date, end_date = self.filters["chart_period_range"]
            base_queryset = base_queryset.filter(
                created_at__date__gte=start_date, created_at__date__lte=end_date
            )

        return (
            base_queryset.values("project_id", "project__name")
            .annotate(
                cancelled_work_items=Count("id", filter=Q(state__group="cancelled")),
                completed_work_items=Count("id", filter=Q(state__group="completed")),
                backlog_work_items=Count("id", filter=Q(state__group="backlog")),
                un_started_work_items=Count("id", filter=Q(state__group="unstarted")),
                started_work_items=Count("id", filter=Q(state__group="started")),
            )
            .order_by("project_id")
        )

    def get_work_items_stats(
        self, cycle_id=None, module_id=None, peek_view=False
    ) -> Dict[str, Dict[str, int]]:
        base_queryset = None
        if cycle_id is not None:
            cycle_issues = CycleIssue.objects.filter(
                **self.filters["base_filters"], cycle_id=cycle_id
            ).values_list("issue_id", flat=True)
            base_queryset = Issue.issue_objects.filter(id__in=cycle_issues)
        elif module_id is not None:
            module_issues = ModuleIssue.objects.filter(
                **self.filters["base_filters"], module_id=module_id
            ).values_list("issue_id", flat=True)
            base_queryset = Issue.issue_objects.filter(id__in=module_issues)
        elif peek_view:
            base_queryset = Issue.issue_objects.filter(**self.filters["base_filters"])
        else:
            base_queryset = Issue.issue_objects.filter(**self.filters["base_filters"])
        return (
            base_queryset.values("project_id", "project__name")
            .annotate(
                cancelled_work_items=Count("id", filter=Q(state__group="cancelled")),
                completed_work_items=Count("id", filter=Q(state__group="completed")),
                backlog_work_items=Count("id", filter=Q(state__group="backlog")),
                un_started_work_items=Count("id", filter=Q(state__group="unstarted")),
                started_work_items=Count("id", filter=Q(state__group="started")),
            )
            .order_by("project_id")
        )

        return (
            base_queryset.annotate(display_name=F("assignees__display_name"))
            .annotate(assignee_id=F("assignees__id"))
            .annotate(avatar=F("assignees__avatar"))
            .annotate(
                avatar_url=Case(
                    # If `avatar_asset` exists, use it to generate the asset URL
                    When(
                        assignees__avatar_asset__isnull=False,
                        then=Concat(
                            Value("/api/assets/v2/static/"),
                            "assignees__avatar_asset",  # Assuming avatar_asset has an id or relevant field
                            Value("/"),
                        ),
                    ),
                    # If `avatar_asset` is None, fall back to using `avatar` field directly
                    When(
                        assignees__avatar_asset__isnull=True, then="assignees__avatar"
                    ),
                    default=Value(None),
                    output_field=models.CharField(),
                )
            )
            .values("display_name", "assignee_id", "avatar_url")
            .annotate(
                cancelled_work_items=Count(
                    "id", filter=Q(state__group="cancelled"), distinct=True
                ),
                completed_work_items=Count(
                    "id", filter=Q(state__group="completed"), distinct=True
                ),
                backlog_work_items=Count(
                    "id", filter=Q(state__group="backlog"), distinct=True
                ),
                un_started_work_items=Count(
                    "id", filter=Q(state__group="unstarted"), distinct=True
                ),
                started_work_items=Count(
                    "id", filter=Q(state__group="started"), distinct=True
                ),
            )
            .order_by("display_name")
        )

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE")
    def get(self, request: HttpRequest, slug: str) -> Response:
        self.initialize_workspace(slug, type="chart")
        type = request.GET.get("type", "work-items")

        if type == "work-items":
            # Optionally accept cycle_id or module_id as query params
            cycle_id = request.GET.get("cycle_id", None)
            module_id = request.GET.get("module_id", None)
            peek_view = request.GET.get("peek_view", False)
            return Response(
                self.get_work_items_stats(
                    cycle_id=cycle_id, module_id=module_id, peek_view=peek_view
                ),
                status=status.HTTP_200_OK,
            )

        return Response({"message": "Invalid type"}, status=status.HTTP_400_BAD_REQUEST)


class AdvanceAnalyticsChartEndpoint(AdvanceAnalyticsBaseView):
    def project_chart(self) -> List[Dict[str, Any]]:
        # Get the base queryset with workspace and project filters
        base_queryset = Issue.issue_objects.filter(**self.filters["base_filters"])
        date_filter = {}

        # Apply date range filter if available
        if self.filters["chart_period_range"]:
            start_date, end_date = self.filters["chart_period_range"]
            date_filter = {
                "created_at__date__gte": start_date,
                "created_at__date__lte": end_date,
            }

        total_work_items = base_queryset.filter(**date_filter).count()
        total_cycles = Cycle.objects.filter(
            **self.filters["base_filters"], **date_filter
        ).count()
        total_modules = Module.objects.filter(
            **self.filters["base_filters"], **date_filter
        ).count()
        total_intake = Issue.objects.filter(
            issue_intake__isnull=False, **self.filters["base_filters"], **date_filter
        ).count()
        total_members = WorkspaceMember.objects.filter(
            workspace__slug=self._workspace_slug, is_active=True, **date_filter
        ).count()
        total_pages = ProjectPage.objects.filter(
            **self.filters["base_filters"], **date_filter
        ).count()
        total_views = IssueView.objects.filter(
            **self.filters["base_filters"], **date_filter
        ).count()

        data = {
            "work_items": total_work_items,
            "cycles": total_cycles,
            "modules": total_modules,
            "intake": total_intake,
            "members": total_members,
            "pages": total_pages,
            "views": total_views,
        }

        return [
            {
                "key": key,
                "name": key.replace("_", " ").title(),
                "count": value or 0,
            }
            for key, value in data.items()
        ]

    def work_item_completion_chart(
        self, cycle_id=None, module_id=None, peek_view=False
    ) -> Dict[str, Any]:
        # Get the base queryset
        queryset = (
            Issue.issue_objects.filter(**self.filters["base_filters"])
            .select_related("workspace", "state", "parent")
            .prefetch_related(
                "assignees", "labels", "issue_module__module", "issue_cycle__cycle"
            )
        )

        if cycle_id is not None and peek_view:
            cycle_issues = CycleIssue.objects.filter(
                **self.filters["base_filters"], cycle_id=cycle_id
            ).values_list("issue_id", flat=True)
            cycle = Cycle.objects.filter(id=cycle_id).first()
            if cycle and cycle.start_date:
                start_date = cycle.start_date.date()
                end_date = cycle.end_date.date()
            else:
                return {"data": [], "schema": {}}
            queryset = cycle_issues
        elif module_id is not None and peek_view:
            module_issues = ModuleIssue.objects.filter(
                **self.filters["base_filters"], module_id=module_id
            ).values_list("issue_id", flat=True)
            module = Module.objects.filter(id=module_id).first()
            if module and module.start_date:
                start_date = module.start_date
                end_date = module.target_date
            else:
                return {"data": [], "schema": {}}
            queryset = module_issues
        elif peek_view:
            project_ids_str = self.request.GET.get("project_ids")
            if project_ids_str:
                project_id_list = [
                    pid.strip() for pid in project_ids_str.split(",") if pid.strip()
                ]
            else:
                project_id_list = []
                return {"data": [], "schema": {}}
            project_id = project_id_list[0]
            project = Project.objects.filter(id=project_id).first()
            if project.created_at:
                start_date = project.created_at.date().replace(day=1)
            else:
                return {"data": [], "schema": {}}
        else:
            workspace = Workspace.objects.get(slug=self._workspace_slug)
            start_date = workspace.created_at.date().replace(day=1)

        if cycle_id or module_id:
            # Get daily stats with optimized query
            daily_stats = (
                queryset.values("created_at__date")
                .annotate(
                    created_count=Count("id"),
                    completed_count=Count(
                        "id", filter=Q(issue__state__group="completed")
                    ),
                )
                .order_by("created_at__date")
            )

            # Create a dictionary of existing stats with summed counts
            stats_dict = {
                stat["created_at__date"].strftime("%Y-%m-%d"): {
                    "created_count": stat["created_count"],
                    "completed_count": stat["completed_count"],
                }
                for stat in daily_stats
            }

            # Generate data for all days in the range
            data = []
            current_date = start_date
            while current_date <= end_date:
                date_str = current_date.strftime("%Y-%m-%d")
                stats = stats_dict.get(
                    date_str, {"created_count": 0, "completed_count": 0}
                )
                data.append(
                    {
                        "key": date_str,
                        "name": date_str,
                        "count": stats["created_count"] + stats["completed_count"],
                        "completed_issues": stats["completed_count"],
                        "created_issues": stats["created_count"],
                    }
                )
                current_date += timedelta(days=1)
        else:
            # Apply date range filter if available
            if self.filters["chart_period_range"]:
                start_date, end_date = self.filters["chart_period_range"]
                queryset = queryset.filter(
                    created_at__date__gte=start_date, created_at__date__lte=end_date
                )

            # Annotate by month and count
            monthly_stats = (
                queryset.annotate(month=TruncMonth("created_at"))
                .values("month")
                .annotate(
                    created_count=Count("id"),
                    completed_count=Count("id", filter=Q(state__group="completed")),
                )
                .order_by("month")
            )

            # Create dictionary of month -> counts
            stats_dict = {
                stat["month"].strftime("%Y-%m-%d"): {
                    "created_count": stat["created_count"],
                    "completed_count": stat["completed_count"],
                }
                for stat in monthly_stats
            }

            # Generate monthly data (ensure months with 0 count are included)
            data = []
            # include the current date at the end
            end_date = timezone.now().date()
            last_month = end_date.replace(day=1)
            current_month = start_date

            while current_month <= last_month:
                date_str = current_month.strftime("%Y-%m-%d")
                stats = stats_dict.get(
                    date_str, {"created_count": 0, "completed_count": 0}
                )
                data.append(
                    {
                        "key": date_str,
                        "name": date_str,
                        "count": stats["created_count"],
                        "completed_issues": stats["completed_count"],
                        "created_issues": stats["created_count"],
                    }
                )
                # Move to next month
                if current_month.month == 12:
                    current_month = current_month.replace(
                        year=current_month.year + 1, month=1
                    )
                else:
                    current_month = current_month.replace(month=current_month.month + 1)

        schema = {
            "completed_issues": "completed_issues",
            "created_issues": "created_issues",
        }

        return {"data": data, "schema": schema}

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE")
    def get(self, request: HttpRequest, slug: str) -> Response:
        self.initialize_workspace(slug, type="chart")
        type = request.GET.get("type", "projects")
        group_by = request.GET.get("group_by", None)
        x_axis = request.GET.get("x_axis", "PRIORITY")
        cycle_id = request.GET.get("cycle_id", None)
        module_id = request.GET.get("module_id", None)

        if type == "projects":
            return Response(self.project_chart(), status=status.HTTP_200_OK)

        elif type == "custom-work-items":
            queryset = (
                Issue.issue_objects.filter(**self.filters["base_filters"])
                .select_related("workspace", "state", "parent")
                .prefetch_related(
                    "assignees", "labels", "issue_module__module", "issue_cycle__cycle"
                )
            )

            # Apply cycle/module filters if present
            if cycle_id is not None:
                cycle_issues = CycleIssue.objects.filter(
                    **self.filters["base_filters"], cycle_id=cycle_id
                ).values_list("issue_id", flat=True)
                queryset = queryset.filter(id__in=cycle_issues)

            elif module_id is not None:
                module_issues = ModuleIssue.objects.filter(
                    **self.filters["base_filters"], module_id=module_id
                ).values_list("issue_id", flat=True)
                queryset = queryset.filter(id__in=module_issues)

            # Apply date range filter if available
            if self.filters["chart_period_range"]:
                start_date, end_date = self.filters["chart_period_range"]
                queryset = queryset.filter(
                    created_at__date__gte=start_date, created_at__date__lte=end_date
                )

            return Response(
                build_analytics_chart(queryset, x_axis, group_by),
                status=status.HTTP_200_OK,
            )

        elif type == "work-items":
            # Optionally accept cycle_id or module_id as query params
            cycle_id = request.GET.get("cycle_id", None)
            module_id = request.GET.get("module_id", None)
            peek_view = request.GET.get("peek_view", False)
            return Response(
                self.work_item_completion_chart(
                    cycle_id=cycle_id, module_id=module_id, peek_view=peek_view
                ),
                status=status.HTTP_200_OK,
            )

        return Response({"message": "Invalid type"}, status=status.HTTP_400_BAD_REQUEST)
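A standalone sketch of the month-iteration idiom used in work_item_completion_chart above: anchoring on the first of the month and stepping with replace() avoids the day-overflow problems that naive timedelta(days=30) stepping would have. The helper name is mine, not the view's.

# Month-start iterator, extracted for illustration.
from datetime import date

def iter_month_starts(start: date, last: date):
    current = start.replace(day=1)
    while current <= last:
        yield current
        if current.month == 12:
            current = current.replace(year=current.year + 1, month=1)
        else:
            current = current.replace(month=current.month + 1)

print(list(iter_month_starts(date(2024, 11, 5), date(2025, 2, 1))))
# [date(2024, 11, 1), date(2024, 12, 1), date(2025, 1, 1), date(2025, 2, 1)]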
@@ -9,11 +9,11 @@ from rest_framework import status
 from .base import BaseAPIView
 from plane.db.models import APIToken, Workspace
 from plane.app.serializers import APITokenSerializer, APITokenReadSerializer
-from plane.app.permissions import WorkspaceOwnerPermission
+from plane.app.permissions import WorkspaceEntityPermission


 class ApiTokenEndpoint(BaseAPIView):
-    permission_classes = [WorkspaceOwnerPermission]
+    permission_classes = [WorkspaceEntityPermission]

     def post(self, request, slug):
         label = request.data.get("label", str(uuid4().hex))
@@ -68,7 +68,7 @@ class ApiTokenEndpoint(BaseAPIView):


 class ServiceApiTokenEndpoint(BaseAPIView):
-    permission_classes = [WorkspaceOwnerPermission]
+    permission_classes = [WorkspaceEntityPermission]

     def post(self, request, slug):
         workspace = Workspace.objects.get(slug=slug)
@@ -137,7 +137,7 @@ class UserAssetsV2Endpoint(BaseAPIView):
         if type not in allowed_types:
             return Response(
                 {
-                    "error": "Invalid file type. Only JPEG and PNG files are allowed.",
+                    "error": "Invalid file type. Only JPEG, PNG, WebP, JPG and GIF files are allowed.",
                     "status": False,
                 },
                 status=status.HTTP_400_BAD_REQUEST,
@@ -351,7 +351,7 @@ class WorkspaceFileAssetEndpoint(BaseAPIView):
         if type not in allowed_types:
             return Response(
                 {
-                    "error": "Invalid file type. Only JPEG and PNG files are allowed.",
+                    "error": "Invalid file type. Only JPEG, PNG, WebP, JPG and GIF files are allowed.",
                     "status": False,
                 },
                 status=status.HTTP_400_BAD_REQUEST,
@@ -552,7 +552,7 @@ class ProjectAssetEndpoint(BaseAPIView):
         if type not in allowed_types:
             return Response(
                 {
-                    "error": "Invalid file type. Only JPEG and PNG files are allowed.",
+                    "error": "Invalid file type. Only JPEG, PNG, WebP, JPG and GIF files are allowed.",
                    "status": False,
                 },
                 status=status.HTTP_400_BAD_REQUEST,
@@ -683,7 +683,7 @@ class ProjectBulkAssetEndpoint(BaseAPIView):
         # For some cases, the bulk api is called after the issue is deleted creating
         # an integrity error
         try:
-            assets.update(issue_id=entity_id)
+            assets.update(issue_id=entity_id, project_id=project_id)
         except IntegrityError:
             pass
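The widened error message above implies a widened allow-list. A minimal sketch of that check; the exact MIME strings are an assumption on my part, not copied from the diff, which only shows the user-facing message.

# Hypothetical allow-list matching the new error message.
allowed_types = [
    "image/jpeg",
    "image/png",
    "image/webp",
    "image/jpg",
    "image/gif",
]

def validate_upload_type(content_type: str) -> bool:
    return content_type in allowed_types

assert validate_upload_type("image/webp")
assert not validate_upload_type("image/svg+xml")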
@@ -51,8 +51,7 @@ from plane.db.models import (
 )
 from plane.utils.analytics_plot import burndown_plot
 from plane.bgtasks.recent_visited_task import recent_visited_task

 # Module imports
+from plane.utils.host import base_host
 from .. import BaseAPIView, BaseViewSet
 from plane.bgtasks.webhook_task import model_activity
 from plane.utils.timezone_converter import convert_to_utc, user_timezone_converter
@@ -118,6 +117,7 @@ class CycleViewSet(BaseViewSet):
                     issue_cycle__issue__archived_at__isnull=True,
                     issue_cycle__issue__is_draft=False,
+                    issue_cycle__deleted_at__isnull=True,
                     issue_cycle__issue__deleted_at__isnull=True,
                 ),
             )
         )
@@ -130,6 +130,7 @@ class CycleViewSet(BaseViewSet):
                     issue_cycle__issue__archived_at__isnull=True,
                     issue_cycle__issue__is_draft=False,
+                    issue_cycle__deleted_at__isnull=True,
                     issue_cycle__issue__deleted_at__isnull=True,
                 ),
             )
         )
@@ -142,6 +143,7 @@ class CycleViewSet(BaseViewSet):
                     issue_cycle__issue__archived_at__isnull=True,
                     issue_cycle__issue__is_draft=False,
+                    issue_cycle__deleted_at__isnull=True,
                     issue_cycle__issue__deleted_at__isnull=True,
                 ),
             )
         )
@@ -267,9 +269,7 @@ class CycleViewSet(BaseViewSet):
             "created_by",
         )
         datetime_fields = ["start_date", "end_date"]
-        data = user_timezone_converter(
-            data, datetime_fields, request.user.user_timezone
-        )
+        data = user_timezone_converter(data, datetime_fields, project_timezone)
         return Response(data, status=status.HTTP_200_OK)

     @allow_permission([ROLE.ADMIN, ROLE.MEMBER])
@@ -318,9 +318,13 @@ class CycleViewSet(BaseViewSet):
             .first()
         )

+        # Fetch the project timezone
+        project = Project.objects.get(id=self.kwargs.get("project_id"))
+        project_timezone = project.timezone
+
         datetime_fields = ["start_date", "end_date"]
         cycle = user_timezone_converter(
-            cycle, datetime_fields, request.user.user_timezone
+            cycle, datetime_fields, project_timezone
         )

         # Send the model activity
@@ -331,7 +335,7 @@ class CycleViewSet(BaseViewSet):
             current_instance=None,
             actor_id=request.user.id,
             slug=slug,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
             return Response(cycle, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -407,10 +411,12 @@ class CycleViewSet(BaseViewSet):
             "created_by",
         ).first()

+        # Fetch the project timezone
+        project = Project.objects.get(id=self.kwargs.get("project_id"))
+        project_timezone = project.timezone
+
         datetime_fields = ["start_date", "end_date"]
-        cycle = user_timezone_converter(
-            cycle, datetime_fields, request.user.user_timezone
-        )
+        cycle = user_timezone_converter(cycle, datetime_fields, project_timezone)

         # Send the model activity
         model_activity.delay(
@@ -420,7 +426,7 @@ class CycleViewSet(BaseViewSet):
             current_instance=current_instance,
             actor_id=request.user.id,
             slug=slug,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )

         return Response(cycle, status=status.HTTP_200_OK)
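The recurring change above swaps the requesting user's timezone for the project's timezone when rendering cycle dates. A sketch of what that conversion amounts to; the zoneinfo usage is my assumption about user_timezone_converter's intent, since only its call sites appear in the diff.

# Illustrative timezone conversion for a datetime field.
from datetime import datetime, timezone as dt_timezone
from zoneinfo import ZoneInfo

def to_project_tz(value: datetime, project_timezone: str) -> datetime:
    return value.astimezone(ZoneInfo(project_timezone))

utc_start = datetime(2025, 1, 15, 18, 30, tzinfo=dt_timezone.utc)
print(to_project_tz(utc_start, "Asia/Kolkata"))  # 2025-01-16 00:00:00+05:30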
@@ -480,10 +486,11 @@ class CycleViewSet(BaseViewSet):
         )

         queryset = queryset.first()
+        # Fetch the project timezone
+        project = Project.objects.get(id=self.kwargs.get("project_id"))
+        project_timezone = project.timezone
         datetime_fields = ["start_date", "end_date"]
-        data = user_timezone_converter(
-            data, datetime_fields, request.user.user_timezone
-        )
+        data = user_timezone_converter(data, datetime_fields, project_timezone)

         recent_visited_task.delay(
             slug=slug,
@@ -532,7 +539,7 @@ class CycleViewSet(BaseViewSet):
             current_instance=None,
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
         # TODO: Soft delete the cycle break the onetoone relationship with cycle issue
         cycle.delete()
@@ -566,16 +573,12 @@ class CycleDateCheckEndpoint(BaseAPIView):
                 status=status.HTTP_400_BAD_REQUEST,
             )

-        is_start_date_end_date_equal = (
-            True if str(start_date) == str(end_date) else False
-        )
         start_date = convert_to_utc(
             date=str(start_date), project_id=project_id, is_start_date=True
         )
         end_date = convert_to_utc(
             date=str(end_date),
             project_id=project_id,
-            is_start_date_end_date_equal=is_start_date_end_date_equal,
         )

         # Check if any cycle intersects in the given interval
@@ -660,6 +663,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
                     issue_cycle__issue__archived_at__isnull=True,
                     issue_cycle__issue__is_draft=False,
+                    issue_cycle__deleted_at__isnull=True,
                     issue_cycle__issue__deleted_at__isnull=True,
                 ),
             )
         )
@@ -724,6 +728,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
                 )
             )
         )
+        old_cycle = old_cycle.first()

         estimate_type = Project.objects.filter(
             workspace__slug=slug,
@@ -842,7 +847,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
             )

             estimate_completion_chart = burndown_plot(
-                queryset=old_cycle.first(),
+                queryset=old_cycle,
                 slug=slug,
                 project_id=project_id,
                 plot_type="points",
@@ -989,7 +994,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):

             # Pass the new_cycle queryset to burndown_plot
             completion_chart = burndown_plot(
-                queryset=old_cycle.first(),
+                queryset=old_cycle,
                 slug=slug,
                 project_id=project_id,
                 plot_type="issues",
@@ -1001,12 +1006,12 @@ class TransferCycleIssueEndpoint(BaseAPIView):
             ).first()

             current_cycle.progress_snapshot = {
-                "total_issues": old_cycle.first().total_issues,
-                "completed_issues": old_cycle.first().completed_issues,
-                "cancelled_issues": old_cycle.first().cancelled_issues,
-                "started_issues": old_cycle.first().started_issues,
-                "unstarted_issues": old_cycle.first().unstarted_issues,
-                "backlog_issues": old_cycle.first().backlog_issues,
+                "total_issues": old_cycle.total_issues,
+                "completed_issues": old_cycle.completed_issues,
+                "cancelled_issues": old_cycle.cancelled_issues,
+                "started_issues": old_cycle.started_issues,
+                "unstarted_issues": old_cycle.unstarted_issues,
+                "backlog_issues": old_cycle.backlog_issues,
                 "distribution": {
                     "labels": label_distribution_data,
                     "assignees": assignee_distribution_data,
@@ -1071,7 +1076,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
             ),
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )

         return Response({"message": "Success"}, status=status.HTTP_200_OK)
@@ -1114,6 +1119,14 @@ class CycleUserPropertiesEndpoint(BaseAPIView):
 class CycleProgressEndpoint(BaseAPIView):
     @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
     def get(self, request, slug, project_id, cycle_id):
+        cycle = Cycle.objects.filter(
+            workspace__slug=slug, project_id=project_id, id=cycle_id
+        ).first()
+        if not cycle:
+            return Response(
+                {"error": "Cycle not found"}, status=status.HTTP_404_NOT_FOUND
+            )
         aggregate_estimates = (
             Issue.issue_objects.filter(
                 estimate_point__estimate__type="points",
@@ -1164,53 +1177,60 @@ class CycleProgressEndpoint(BaseAPIView):
                 ),
             )
         )
+        if cycle.progress_snapshot:
+            backlog_issues = cycle.progress_snapshot.get("backlog_issues", 0)
+            unstarted_issues = cycle.progress_snapshot.get("unstarted_issues", 0)
+            started_issues = cycle.progress_snapshot.get("started_issues", 0)
+            cancelled_issues = cycle.progress_snapshot.get("cancelled_issues", 0)
+            completed_issues = cycle.progress_snapshot.get("completed_issues", 0)
+            total_issues = cycle.progress_snapshot.get("total_issues", 0)
+        else:
-        backlog_issues = Issue.issue_objects.filter(
-            issue_cycle__cycle_id=cycle_id,
-            issue_cycle__deleted_at__isnull=True,
-            workspace__slug=slug,
-            project_id=project_id,
-            state__group="backlog",
-        ).count()
+            backlog_issues = Issue.issue_objects.filter(
+                issue_cycle__cycle_id=cycle_id,
+                issue_cycle__deleted_at__isnull=True,
+                workspace__slug=slug,
+                project_id=project_id,
+                state__group="backlog",
+            ).count()
-        unstarted_issues = Issue.issue_objects.filter(
-            issue_cycle__cycle_id=cycle_id,
-            issue_cycle__deleted_at__isnull=True,
-            workspace__slug=slug,
-            project_id=project_id,
-            state__group="unstarted",
-        ).count()
+            unstarted_issues = Issue.issue_objects.filter(
+                issue_cycle__cycle_id=cycle_id,
+                issue_cycle__deleted_at__isnull=True,
+                workspace__slug=slug,
+                project_id=project_id,
+                state__group="unstarted",
+            ).count()
-        started_issues = Issue.issue_objects.filter(
-            issue_cycle__cycle_id=cycle_id,
-            issue_cycle__deleted_at__isnull=True,
-            workspace__slug=slug,
-            project_id=project_id,
-            state__group="started",
-        ).count()
+            started_issues = Issue.issue_objects.filter(
+                issue_cycle__cycle_id=cycle_id,
+                issue_cycle__deleted_at__isnull=True,
+                workspace__slug=slug,
+                project_id=project_id,
+                state__group="started",
+            ).count()
-        cancelled_issues = Issue.issue_objects.filter(
-            issue_cycle__cycle_id=cycle_id,
-            issue_cycle__deleted_at__isnull=True,
-            workspace__slug=slug,
-            project_id=project_id,
-            state__group="cancelled",
-        ).count()
+            cancelled_issues = Issue.issue_objects.filter(
+                issue_cycle__cycle_id=cycle_id,
+                issue_cycle__deleted_at__isnull=True,
+                workspace__slug=slug,
+                project_id=project_id,
+                state__group="cancelled",
+            ).count()
-        completed_issues = Issue.issue_objects.filter(
-            issue_cycle__cycle_id=cycle_id,
-            issue_cycle__deleted_at__isnull=True,
-            workspace__slug=slug,
-            project_id=project_id,
-            state__group="completed",
-        ).count()
+            completed_issues = Issue.issue_objects.filter(
+                issue_cycle__cycle_id=cycle_id,
+                issue_cycle__deleted_at__isnull=True,
+                workspace__slug=slug,
+                project_id=project_id,
+                state__group="completed",
+            ).count()
-        total_issues = Issue.issue_objects.filter(
-            issue_cycle__cycle_id=cycle_id,
-            issue_cycle__deleted_at__isnull=True,
-            workspace__slug=slug,
-            project_id=project_id,
-        ).count()
+            total_issues = Issue.issue_objects.filter(
+                issue_cycle__cycle_id=cycle_id,
+                issue_cycle__deleted_at__isnull=True,
+                workspace__slug=slug,
+                project_id=project_id,
+            ).count()

         return Response(
             {
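The hunk above introduces a snapshot-first pattern: serve counts from the frozen progress_snapshot when one exists, otherwise compute them live. A compact sketch of the same control flow; count_live is a stand-in for the Issue.issue_objects queries in the view, and the function name is mine.

# Snapshot-or-live fallback, extracted for illustration.
GROUPS = ["backlog", "unstarted", "started", "cancelled", "completed"]

def cycle_counts(snapshot, count_live):
    if snapshot:
        return {f"{g}_issues": snapshot.get(f"{g}_issues", 0) for g in GROUPS}
    return {f"{g}_issues": count_live(g) for g in GROUPS}

print(cycle_counts({"completed_issues": 7}, count_live=lambda g: 0))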
@@ -1271,6 +1291,25 @@ class CycleAnalyticsEndpoint(BaseAPIView):
                 status=status.HTTP_400_BAD_REQUEST,
             )

+        # this will tell whether the issues were transferred to the new cycle
+        """
+        if the issues were transferred to the new cycle, then the progress_snapshot will be present;
+        return the progress_snapshot data in the analytics for each date.
+
+        else the issues were not transferred to the new cycle, so generate the stats from the cycle issue bridge tables
+        """
+
+        if cycle.progress_snapshot:
+            distribution = cycle.progress_snapshot.get("distribution", {})
+            return Response(
+                {
+                    "labels": distribution.get("labels", []),
+                    "assignees": distribution.get("assignees", []),
+                    "completion_chart": distribution.get("completion_chart", {}),
+                },
+                status=status.HTTP_200_OK,
+            )
+
         estimate_type = Project.objects.filter(
             workspace__slug=slug,
             pk=project_id,
@@ -27,7 +27,7 @@ from plane.utils.issue_filters import issue_filters
 from plane.utils.order_queryset import order_issue_queryset
 from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPaginator
 from plane.app.permissions import allow_permission, ROLE

+from plane.utils.host import base_host

 class CycleIssueViewSet(BaseViewSet):
     serializer_class = CycleIssueSerializer
@@ -291,7 +291,7 @@ class CycleIssueViewSet(BaseViewSet):
             ),
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
         return Response({"message": "success"}, status=status.HTTP_201_CREATED)

@@ -317,7 +317,7 @@ class CycleIssueViewSet(BaseViewSet):
             current_instance=None,
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
         cycle_issue.delete()
         return Response(status=status.HTTP_204_NO_CONTENT)
@@ -27,16 +27,24 @@ from plane.db.models import (
     Project,
     ProjectMember,
     CycleIssue,
+    IssueDescriptionVersion,
 )
 from plane.app.serializers import (
     IssueCreateSerializer,
     IssueSerializer,
     IssueDetailSerializer,
     IntakeSerializer,
     IntakeIssueSerializer,
     IntakeIssueDetailSerializer,
+    IssueDescriptionVersionDetailSerializer,
 )
 from plane.utils.issue_filters import issue_filters
 from plane.bgtasks.issue_activities_task import issue_activity
+from plane.bgtasks.issue_description_version_task import issue_description_version_task
+from plane.app.views.base import BaseAPIView
+from plane.utils.timezone_converter import user_timezone_converter
+from plane.utils.global_paginator import paginate
+from plane.utils.host import base_host
+from plane.db.models.intake import SourceType


 class IntakeViewSet(BaseViewSet):
@@ -87,7 +95,7 @@ class IntakeIssueViewSet(BaseViewSet):
     serializer_class = IntakeIssueSerializer
     model = IntakeIssue

-    filterset_fields = ["statulls"]
+    filterset_fields = ["status"]

     def get_queryset(self):
         return (
@@ -218,7 +226,7 @@ class IntakeIssueViewSet(BaseViewSet):
                 workspace__slug=slug,
                 project_id=project_id,
                 member=request.user,
-                role=5,
+                role=ROLE.GUEST.value,
                 is_active=True,
             ).exists()
             and not project.guest_view_all_features
@@ -271,7 +279,7 @@ class IntakeIssueViewSet(BaseViewSet):
             intake_id=intake_id.id,
             project_id=project_id,
             issue_id=serializer.data["id"],
-            source=request.data.get("source", "IN-APP"),
+            source=SourceType.IN_APP,
         )
         # Create an Issue Activity
         issue_activity.delay(
@@ -283,9 +291,16 @@ class IntakeIssueViewSet(BaseViewSet):
             current_instance=None,
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
+            intake=str(intake_issue.id),
         )
+        # updated issue description version
+        issue_description_version_task.delay(
+            updated_issue=json.dumps(request.data, cls=DjangoJSONEncoder),
+            issue_id=str(serializer.data["id"]),
+            user_id=request.user.id,
+            is_creating=True,
+        )
         intake_issue = (
             IntakeIssue.objects.select_related("issue")
             .prefetch_related("issue__labels", "issue__assignees")
@@ -385,13 +400,15 @@ class IntakeIssueViewSet(BaseViewSet):
             ),
             "description": issue_data.get("description", issue.description),
         }
+        current_instance = json.dumps(
+            IssueDetailSerializer(issue).data, cls=DjangoJSONEncoder
+        )

         issue_serializer = IssueCreateSerializer(
             issue, data=issue_data, partial=True, context={"project_id": project_id}
         )

         if issue_serializer.is_valid():
-            current_instance = issue
             # Log all the updates
             requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
             if issue is not None:
@@ -401,15 +418,18 @@ class IntakeIssueViewSet(BaseViewSet):
                     actor_id=str(request.user.id),
                     issue_id=str(issue.id),
                     project_id=str(project_id),
-                    current_instance=json.dumps(
-                        IssueSerializer(current_instance).data,
-                        cls=DjangoJSONEncoder,
-                    ),
+                    current_instance=current_instance,
                     epoch=int(timezone.now().timestamp()),
                     notification=True,
-                    origin=request.META.get("HTTP_ORIGIN"),
+                    origin=base_host(request=request, is_app=True),
+                    intake=str(intake_issue.id),
                 )
+                # updated issue description version
+                issue_description_version_task.delay(
+                    updated_issue=current_instance,
+                    issue_id=str(pk),
+                    user_id=request.user.id,
+                )
             issue_serializer.save()
         else:
             return Response(
@@ -467,7 +487,7 @@ class IntakeIssueViewSet(BaseViewSet):
             current_instance=current_instance,
             epoch=int(timezone.now().timestamp()),
             notification=False,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
             intake=(intake_issue.id),
         )

@@ -549,7 +569,7 @@ class IntakeIssueViewSet(BaseViewSet):
                 workspace__slug=slug,
                 project_id=project_id,
                 member=request.user,
-                role=5,
+                role=ROLE.GUEST.value,
                 is_active=True,
             ).exists()
             and not project.guest_view_all_features
@@ -557,7 +577,7 @@ class IntakeIssueViewSet(BaseViewSet):
         ):
             return Response(
                 {"error": "You are not allowed to view this issue"},
-                status=status.HTTP_400_BAD_REQUEST,
+                status=status.HTTP_403_FORBIDDEN,
             )
         issue = IntakeIssueDetailSerializer(intake_issue).data
         return Response(issue, status=status.HTTP_200_OK)
@@ -584,3 +604,80 @@ class IntakeIssueViewSet(BaseViewSet):

         intake_issue.delete()
         return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class IntakeWorkItemDescriptionVersionEndpoint(BaseAPIView):
+    def process_paginated_result(self, fields, results, timezone):
+        paginated_data = results.values(*fields)
+
+        datetime_fields = ["created_at", "updated_at"]
+        paginated_data = user_timezone_converter(
+            paginated_data, datetime_fields, timezone
+        )
+
+        return paginated_data
+
+    @allow_permission(allowed_roles=[ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
+    def get(self, request, slug, project_id, work_item_id, pk=None):
+        project = Project.objects.get(pk=project_id)
+        issue = Issue.objects.get(
+            workspace__slug=slug, project_id=project_id, pk=work_item_id
+        )
+
+        if (
+            ProjectMember.objects.filter(
+                workspace__slug=slug,
+                project_id=project_id,
+                member=request.user,
+                role=ROLE.GUEST.value,
+                is_active=True,
+            ).exists()
+            and not project.guest_view_all_features
+            and not issue.created_by == request.user
+        ):
+            return Response(
+                {"error": "You are not allowed to view this issue"},
+                status=status.HTTP_403_FORBIDDEN,
+            )
+
+        if pk:
+            issue_description_version = IssueDescriptionVersion.objects.get(
+                workspace__slug=slug,
+                project_id=project_id,
+                issue_id=work_item_id,
+                pk=pk,
+            )
+
+            serializer = IssueDescriptionVersionDetailSerializer(
+                issue_description_version
+            )
+            return Response(serializer.data, status=status.HTTP_200_OK)
+
+        cursor = request.GET.get("cursor", None)
+
+        required_fields = [
+            "id",
+            "workspace",
+            "project",
+            "issue",
+            "last_saved_at",
+            "owned_by",
+            "created_at",
+            "updated_at",
+            "created_by",
+            "updated_by",
+        ]
+
+        issue_description_versions_queryset = IssueDescriptionVersion.objects.filter(
+            workspace__slug=slug, project_id=project_id, issue_id=work_item_id
+        )
+
+        paginated_data = paginate(
+            base_queryset=issue_description_versions_queryset,
+            queryset=issue_description_versions_queryset,
+            cursor=cursor,
+            on_result=lambda results: self.process_paginated_result(
+                required_fields, results, request.user.user_timezone
+            ),
+        )
+        return Response(paginated_data, status=status.HTTP_200_OK)
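A hedged sketch of paging through the endpoint added above. The cursor format is whatever plane.utils.global_paginator emits, so it is treated as opaque, and the "results"/"next_cursor" response keys are assumptions on my part, as are the URL placeholders.

# Hypothetical cursor-paging loop against the new intake description-version route.
import requests

url = (
    "https://plane.example.com/api/workspaces/my-workspace/projects/"
    "<project_id>/intake-work-items/<work_item_id>/description-versions/"
)  # placeholders for real UUIDs
headers = {"Authorization": "Bearer <api-token>"}  # placeholder
cursor = None
while True:
    resp = requests.get(
        url, params={"cursor": cursor} if cursor else None, headers=headers
    ).json()
    for version in resp.get("results", []):  # assumed key
        print(version["id"], version["last_saved_at"])
    cursor = resp.get("next_cursor")  # assumed key
    if not cursor:
        break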
@@ -14,7 +14,7 @@ from rest_framework import status
 from .. import BaseAPIView
 from plane.app.serializers import IssueActivitySerializer, IssueCommentSerializer
 from plane.app.permissions import ProjectEntityPermission, allow_permission, ROLE
-from plane.db.models import IssueActivity, IssueComment, CommentReaction
+from plane.db.models import IssueActivity, IssueComment, CommentReaction, IntakeIssue


 class IssueActivityEndpoint(BaseAPIView):
@@ -57,13 +57,22 @@ class IssueActivityEndpoint(BaseAPIView):
                 )
             )
         )
-        issue_activities = IssueActivitySerializer(issue_activities, many=True).data
-        issue_comments = IssueCommentSerializer(issue_comments, many=True).data

         if request.GET.get("activity_type", None) == "issue-property":
+            issue_activities = issue_activities.prefetch_related(
+                Prefetch(
+                    "issue__issue_intake",
+                    queryset=IntakeIssue.objects.only(
+                        "source_email", "source", "extra"
+                    ),
+                    to_attr="source_data",
+                )
+            )
+            issue_activities = IssueActivitySerializer(issue_activities, many=True).data
             return Response(issue_activities, status=status.HTTP_200_OK)

         if request.GET.get("activity_type", None) == "issue-comment":
+            issue_comments = IssueCommentSerializer(issue_comments, many=True).data
             return Response(issue_comments, status=status.HTTP_200_OK)

         result_list = sorted(
@@ -37,7 +37,7 @@ from plane.utils.order_queryset import order_issue_queryset
 from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPaginator
 from plane.app.permissions import allow_permission, ROLE
 from plane.utils.error_codes import ERROR_CODES

+from plane.utils.host import base_host
 # Module imports
 from .. import BaseViewSet, BaseAPIView

@@ -259,7 +259,7 @@ class IssueArchiveViewSet(BaseViewSet):
             ),
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
         issue.archived_at = timezone.now().date()
         issue.save()
@@ -287,7 +287,7 @@ class IssueArchiveViewSet(BaseViewSet):
             ),
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
         issue.archived_at = None
         issue.save()
@@ -333,7 +333,7 @@ class BulkArchiveIssuesEndpoint(BaseAPIView):
             ),
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
         issue.archived_at = timezone.now().date()
         bulk_archive_issues.append(issue)
@@ -21,7 +21,7 @@ from plane.bgtasks.issue_activities_task import issue_activity
 from plane.app.permissions import allow_permission, ROLE
 from plane.settings.storage import S3Storage
 from plane.bgtasks.storage_metadata_task import get_asset_object_metadata

+from plane.utils.host import base_host

 class IssueAttachmentEndpoint(BaseAPIView):
     serializer_class = IssueAttachmentSerializer
@@ -48,7 +48,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
             current_instance=json.dumps(serializer.data, cls=DjangoJSONEncoder),
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
             return Response(serializer.data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -67,7 +67,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
             current_instance=None,
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )

         return Response(status=status.HTTP_204_NO_CONTENT)
@@ -155,7 +155,7 @@ class IssueAttachmentV2Endpoint(BaseAPIView):
             current_instance=None,
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )

         return Response(status=status.HTTP_204_NO_CONTENT)
@@ -213,7 +213,7 @@ class IssueAttachmentV2Endpoint(BaseAPIView):
             current_instance=json.dumps(serializer.data, cls=DjangoJSONEncoder),
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )

         # Update the attachment
@@ -45,6 +45,7 @@ from plane.db.models import (
     ProjectMember,
     CycleIssue,
     UserRecentVisit,
+    ModuleIssue,
 )
 from plane.utils.grouper import (
     issue_group_values,
@@ -60,6 +61,7 @@ from plane.bgtasks.recent_visited_task import recent_visited_task
 from plane.utils.global_paginator import paginate
 from plane.bgtasks.webhook_task import model_activity
 from plane.bgtasks.issue_description_version_task import issue_description_version_task
+from plane.utils.host import base_host


 class IssueListEndpoint(BaseAPIView):
@@ -378,7 +380,7 @@ class IssueViewSet(BaseViewSet):
             current_instance=None,
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
         issue = (
             issue_queryset_grouper(
@@ -428,7 +430,7 @@ class IssueViewSet(BaseViewSet):
             current_instance=None,
             actor_id=request.user.id,
             slug=slug,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
         # updated issue description version
         issue_description_version_task.delay(
@@ -564,7 +566,7 @@ class IssueViewSet(BaseViewSet):
         ):
             return Response(
                 {"error": "You are not allowed to view this issue"},
-                status=status.HTTP_400_BAD_REQUEST,
+                status=status.HTTP_403_FORBIDDEN,
             )

         recent_visited_task.delay(
@@ -631,7 +633,7 @@ class IssueViewSet(BaseViewSet):
         )

         current_instance = json.dumps(
-            IssueSerializer(issue).data, cls=DjangoJSONEncoder
+            IssueDetailSerializer(issue).data, cls=DjangoJSONEncoder
         )

         requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
@@ -649,7 +651,7 @@ class IssueViewSet(BaseViewSet):
             current_instance=current_instance,
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
         model_activity.delay(
             model_name="issue",
@@ -658,7 +660,7 @@ class IssueViewSet(BaseViewSet):
             current_instance=current_instance,
             actor_id=request.user.id,
             slug=slug,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
         # updated issue description version
         issue_description_version_task.delay(
@@ -690,7 +692,8 @@ class IssueViewSet(BaseViewSet):
             current_instance={},
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
+            subscriber=False,
         )
         return Response(status=status.HTTP_204_NO_CONTENT)
@@ -738,6 +741,13 @@ class BulkDeleteIssuesEndpoint(BaseAPIView):
|
||||
|
||||
total_issues = len(issues)
|
||||
|
||||
# First, delete all related cycle issues
|
||||
CycleIssue.objects.filter(issue_id__in=issue_ids).delete()
|
||||
|
||||
# Then, delete all related module issues
|
||||
ModuleIssue.objects.filter(issue_id__in=issue_ids).delete()
|
||||
|
||||
# Finally, delete the issues themselves
|
||||
issues.delete()
|
||||
|
||||
return Response(
|
||||
@@ -1025,9 +1035,17 @@ class IssueBulkUpdateDateEndpoint(BaseAPIView):
|
||||
"""
|
||||
Validate that start date is before target date.
|
||||
"""
|
||||
from datetime import datetime
|
||||
|
||||
start = new_start or current_start
|
||||
target = new_target or current_target
|
||||
|
||||
# Convert string dates to datetime objects if they're strings
|
||||
if isinstance(start, str):
|
||||
start = datetime.strptime(start, "%Y-%m-%d").date()
|
||||
if isinstance(target, str):
|
||||
target = datetime.strptime(target, "%Y-%m-%d").date()
|
||||
|
||||
if start and target and start > target:
|
||||
return False
|
||||
return True
|
||||
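With the added coercion, the validator accepts either `date` objects or ISO `YYYY-MM-DD` strings on both sides of the comparison. A standalone sketch of the same logic; the function name here is hypothetical, in the view it is a method of `IssueBulkUpdateDateEndpoint`:

from datetime import datetime, date


def dates_valid(start, target):
    # Mirror of the validation above: coerce ISO strings, then compare.
    if isinstance(start, str):
        start = datetime.strptime(start, "%Y-%m-%d").date()
    if isinstance(target, str):
        target = datetime.strptime(target, "%Y-%m-%d").date()
    return not (start and target and start > target)


assert dates_valid("2025-01-05", date(2025, 1, 10))   # start before target
assert not dates_valid("2025-01-10", "2025-01-05")    # start after target
assert dates_valid(None, "2025-01-05")                # missing dates pass
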
@@ -1269,7 +1287,7 @@ class IssueDetailIdentifierEndpoint(BaseAPIView):
         ):
             return Response(
                 {"error": "You are not allowed to view this issue"},
-                status=status.HTTP_400_BAD_REQUEST,
+                status=status.HTTP_403_FORBIDDEN,
             )

         recent_visited_task.delay(

@@ -17,7 +17,7 @@ from plane.app.serializers import IssueCommentSerializer, CommentReactionSeriali
 from plane.app.permissions import allow_permission, ROLE
 from plane.db.models import IssueComment, ProjectMember, CommentReaction, Project, Issue
 from plane.bgtasks.issue_activities_task import issue_activity
-
+from plane.utils.host import base_host

 class IssueCommentViewSet(BaseViewSet):
     serializer_class = IssueCommentSerializer
@@ -87,7 +87,7 @@ class IssueCommentViewSet(BaseViewSet):
             current_instance=None,
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
             return Response(serializer.data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -105,7 +105,13 @@ class IssueCommentViewSet(BaseViewSet):
             issue_comment, data=request.data, partial=True
         )
         if serializer.is_valid():
-            serializer.save()
+            if (
+                "comment_html" in request.data
+                and request.data["comment_html"] != issue_comment.comment_html
+            ):
+                serializer.save(edited_at=timezone.now())
+            else:
+                serializer.save()
             issue_activity.delay(
                 type="comment.activity.updated",
                 requested_data=requested_data,
@@ -115,7 +121,7 @@ class IssueCommentViewSet(BaseViewSet):
                 current_instance=current_instance,
                 epoch=int(timezone.now().timestamp()),
                 notification=True,
-                origin=request.META.get("HTTP_ORIGIN"),
+                origin=base_host(request=request, is_app=True),
             )
             return Response(serializer.data, status=status.HTTP_200_OK)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -138,7 +144,7 @@ class IssueCommentViewSet(BaseViewSet):
             current_instance=current_instance,
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
         return Response(status=status.HTTP_204_NO_CONTENT)

@@ -182,7 +188,7 @@ class CommentReactionViewSet(BaseViewSet):
             current_instance=None,
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
             return Response(serializer.data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -216,7 +222,7 @@ class CommentReactionViewSet(BaseViewSet):
             ),
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
         comment_reaction.delete()
         return Response(status=status.HTTP_204_NO_CONTENT)

@@ -15,7 +15,7 @@ from plane.app.serializers import IssueLinkSerializer
 from plane.app.permissions import ProjectEntityPermission
 from plane.db.models import IssueLink
 from plane.bgtasks.issue_activities_task import issue_activity
-
+from plane.utils.host import base_host

 class IssueLinkViewSet(BaseViewSet):
     permission_classes = [ProjectEntityPermission]
@@ -52,7 +52,7 @@ class IssueLinkViewSet(BaseViewSet):
             current_instance=None,
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
             return Response(serializer.data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -77,7 +77,7 @@ class IssueLinkViewSet(BaseViewSet):
             current_instance=current_instance,
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
             return Response(serializer.data, status=status.HTTP_200_OK)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -98,7 +98,7 @@ class IssueLinkViewSet(BaseViewSet):
             current_instance=current_instance,
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
         issue_link.delete()
         return Response(status=status.HTTP_204_NO_CONTENT)

@@ -15,7 +15,7 @@ from plane.app.serializers import IssueReactionSerializer
 from plane.app.permissions import allow_permission, ROLE
 from plane.db.models import IssueReaction
 from plane.bgtasks.issue_activities_task import issue_activity
-
+from plane.utils.host import base_host

 class IssueReactionViewSet(BaseViewSet):
     serializer_class = IssueReactionSerializer
@@ -53,7 +53,7 @@ class IssueReactionViewSet(BaseViewSet):
             current_instance=None,
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
             return Response(serializer.data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -78,7 +78,7 @@ class IssueReactionViewSet(BaseViewSet):
             ),
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
         issue_reaction.delete()
         return Response(status=status.HTTP_204_NO_CONTENT)

@@ -27,7 +27,7 @@ from plane.db.models import (
 )
 from plane.bgtasks.issue_activities_task import issue_activity
 from plane.utils.issue_relation_mapper import get_actual_relation
-
+from plane.utils.host import base_host

 class IssueRelationViewSet(BaseViewSet):
     serializer_class = IssueRelationSerializer
@@ -253,7 +253,7 @@ class IssueRelationViewSet(BaseViewSet):
             current_instance=None,
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )

         if relation_type in ["blocking", "start_after", "finish_after"]:
@@ -290,6 +290,6 @@ class IssueRelationViewSet(BaseViewSet):
             current_instance=current_instance,
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
         return Response(status=status.HTTP_204_NO_CONTENT)

@@ -22,7 +22,8 @@ from plane.db.models import Issue, IssueLink, FileAsset, CycleIssue
 from plane.bgtasks.issue_activities_task import issue_activity
 from plane.utils.timezone_converter import user_timezone_converter
 from collections import defaultdict
-
+from plane.utils.host import base_host
+from plane.utils.order_queryset import order_issue_queryset

 class SubIssuesEndpoint(BaseAPIView):
     permission_classes = [ProjectEntityPermission]
@@ -102,6 +103,15 @@ class SubIssuesEndpoint(BaseAPIView):
             .order_by("-created_at")
         )

+        # Ordering
+        order_by_param = request.GET.get("order_by", "-created_at")
+        group_by = request.GET.get("group_by", False)
+
+        if order_by_param:
+            sub_issues, order_by_param = order_issue_queryset(
+                sub_issues, order_by_param
+            )
+
         # create's a dict with state group name with their respective issue id's
         result = defaultdict(list)
         for sub_issue in sub_issues:
@@ -138,6 +148,26 @@ class SubIssuesEndpoint(BaseAPIView):
         sub_issues = user_timezone_converter(
             sub_issues, datetime_fields, request.user.user_timezone
         )
+        # Grouping
+        if group_by:
+            result_dict = defaultdict(list)
+
+            for issue in sub_issues:
+                if group_by == "assignees__ids":
+                    if issue["assignee_ids"]:
+                        assignee_ids = issue["assignee_ids"]
+                        for assignee_id in assignee_ids:
+                            result_dict[str(assignee_id)].append(issue)
+                    elif issue["assignee_ids"] == []:
+                        result_dict["None"].append(issue)
+
+                elif group_by:
+                    result_dict[str(issue[group_by])].append(issue)
+
+            return Response(
+                {"sub_issues": result_dict, "state_distribution": result},
+                status=status.HTTP_200_OK,
+            )
         return Response(
             {"sub_issues": sub_issues, "state_distribution": result},
             status=status.HTTP_200_OK,

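The grouping branch above buckets serialized sub-issues by the `group_by` key, with special handling for multi-valued assignee ids: one issue can appear under several assignees, and unassigned issues land under "None". A self-contained sketch of that shape, using plain dicts in place of serialized issues:

from collections import defaultdict

sub_issues = [
    {"id": 1, "assignee_ids": ["a1", "a2"]},
    {"id": 2, "assignee_ids": []},
]

result_dict = defaultdict(list)
for issue in sub_issues:
    if issue["assignee_ids"]:
        for assignee_id in issue["assignee_ids"]:
            result_dict[str(assignee_id)].append(issue)
    else:
        result_dict["None"].append(issue)

# Issue 1 appears under both "a1" and "a2"; issue 2 under "None".
assert sorted(result_dict) == ["None", "a1", "a2"]
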
@@ -176,7 +206,7 @@ class SubIssuesEndpoint(BaseAPIView):
                 current_instance=json.dumps({"parent": str(sub_issue_id)}),
                 epoch=int(timezone.now().timestamp()),
                 notification=True,
-                origin=request.META.get("HTTP_ORIGIN"),
+                origin=base_host(request=request, is_app=True),
             )
             for sub_issue_id in sub_issue_ids
         ]

@@ -3,7 +3,13 @@ from rest_framework import status
 from rest_framework.response import Response

 # Module imports
-from plane.db.models import IssueVersion, IssueDescriptionVersion
+from plane.db.models import (
+    IssueVersion,
+    IssueDescriptionVersion,
+    Project,
+    ProjectMember,
+    Issue,
+)
 from ..base import BaseAPIView
 from plane.app.serializers import (
     IssueVersionDetailSerializer,
@@ -66,7 +72,7 @@ class IssueVersionEndpoint(BaseAPIView):
         return Response(paginated_data, status=status.HTTP_200_OK)


-class IssueDescriptionVersionEndpoint(BaseAPIView):
+class WorkItemDescriptionVersionEndpoint(BaseAPIView):
     def process_paginated_result(self, fields, results, timezone):
         paginated_data = results.values(*fields)

@@ -78,10 +84,34 @@ class IssueDescriptionVersionEndpoint(BaseAPIView):
         return paginated_data

     @allow_permission(allowed_roles=[ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
-    def get(self, request, slug, project_id, issue_id, pk=None):
+    def get(self, request, slug, project_id, work_item_id, pk=None):
+        project = Project.objects.get(pk=project_id)
+        issue = Issue.objects.get(
+            workspace__slug=slug, project_id=project_id, pk=work_item_id
+        )
+
+        if (
+            ProjectMember.objects.filter(
+                workspace__slug=slug,
+                project_id=project_id,
+                member=request.user,
+                role=ROLE.GUEST.value,
+                is_active=True,
+            ).exists()
+            and not project.guest_view_all_features
+            and not issue.created_by == request.user
+        ):
+            return Response(
+                {"error": "You are not allowed to view this issue"},
+                status=status.HTTP_403_FORBIDDEN,
+            )
+
         if pk:
             issue_description_version = IssueDescriptionVersion.objects.get(
-                workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
+                workspace__slug=slug,
+                project_id=project_id,
+                issue_id=work_item_id,
+                pk=pk,
             )

             serializer = IssueDescriptionVersionDetailSerializer(
@@ -105,8 +135,8 @@ class IssueDescriptionVersionEndpoint(BaseAPIView):
         ]

         issue_description_versions_queryset = IssueDescriptionVersion.objects.filter(
-            workspace__slug=slug, project_id=project_id, issue_id=issue_id
-        )
+            workspace__slug=slug, project_id=project_id, issue_id=work_item_id
+        ).order_by("-created_at")
         paginated_data = paginate(
             base_queryset=issue_description_versions_queryset,
             queryset=issue_description_versions_queryset,

@@ -61,7 +61,7 @@ from plane.utils.timezone_converter import user_timezone_converter
 from plane.bgtasks.webhook_task import model_activity
 from .. import BaseAPIView, BaseViewSet
 from plane.bgtasks.recent_visited_task import recent_visited_task
-
+from plane.utils.host import base_host

 class ModuleViewSet(BaseViewSet):
     model = Module
@@ -376,7 +376,7 @@ class ModuleViewSet(BaseViewSet):
             current_instance=None,
             actor_id=request.user.id,
             slug=slug,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
         datetime_fields = ["created_at", "updated_at"]
         module = user_timezone_converter(
@@ -710,23 +710,31 @@ class ModuleViewSet(BaseViewSet):

     @allow_permission([ROLE.ADMIN, ROLE.MEMBER])
     def partial_update(self, request, slug, project_id, pk):
-        module = self.get_queryset().filter(pk=pk)
+        module_queryset = self.get_queryset().filter(pk=pk)

-        if module.first().archived_at:
+        current_module = module_queryset.first()
+
+        if not current_module:
+            return Response(
+                {"error": "Module not found"},
+                status=status.HTTP_404_NOT_FOUND,
+            )
+
+        if current_module.archived_at:
             return Response(
                 {"error": "Archived module cannot be updated"},
                 status=status.HTTP_400_BAD_REQUEST,
             )
         current_instance = json.dumps(
-            ModuleSerializer(module.first()).data, cls=DjangoJSONEncoder
+            ModuleSerializer(current_module).data, cls=DjangoJSONEncoder
         )
         serializer = ModuleWriteSerializer(
-            module.first(), data=request.data, partial=True
+            current_module, data=request.data, partial=True
         )

         if serializer.is_valid():
             serializer.save()
-            module = module.values(
+            module = module_queryset.values(
                 # Required fields
                 "id",
                 "workspace_id",

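The rewrite above evaluates `.first()` once and guards the `None` case before touching `archived_at`; the old code would raise `AttributeError` (surfacing as a 500) whenever no module matched `pk`. A minimal Django-free analogue of the guard:

from typing import Optional


class Module:
    def __init__(self, archived_at=None):
        self.archived_at = archived_at


def resolve(modules: list) -> Optional[Module]:
    # QuerySet.first() returns None on an empty queryset; reading
    # .archived_at off that None was the crash the hunk above fixes.
    return modules[0] if modules else None


module = resolve([])  # imagine an empty queryset
if module is None:
    print("404 Module not found")
elif module.archived_at:
    print("400 Archived module cannot be updated")
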
@@ -768,7 +776,7 @@ class ModuleViewSet(BaseViewSet):
             current_instance=current_instance,
             actor_id=request.user.id,
             slug=slug,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )

         datetime_fields = ["created_at", "updated_at"]
@@ -795,7 +803,7 @@ class ModuleViewSet(BaseViewSet):
                 current_instance=json.dumps({"module_name": str(module.name)}),
                 epoch=int(timezone.now().timestamp()),
                 notification=True,
-                origin=request.META.get("HTTP_ORIGIN"),
+                origin=base_host(request=request, is_app=True),
             )
             for issue in module_issues
         ]

@@ -34,7 +34,7 @@ from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPagina

 # Module imports
 from .. import BaseViewSet
-
+from plane.utils.host import base_host

 class ModuleIssueViewSet(BaseViewSet):
     serializer_class = ModuleIssueSerializer
@@ -221,7 +221,7 @@ class ModuleIssueViewSet(BaseViewSet):
                 current_instance=None,
                 epoch=int(timezone.now().timestamp()),
                 notification=True,
-                origin=request.META.get("HTTP_ORIGIN"),
+                origin=base_host(request=request, is_app=True),
             )
             for issue in issues
         ]
@@ -261,7 +261,7 @@ class ModuleIssueViewSet(BaseViewSet):
                 current_instance=None,
                 epoch=int(timezone.now().timestamp()),
                 notification=True,
-                origin=request.META.get("HTTP_ORIGIN"),
+                origin=base_host(request=request, is_app=True),
             )
             for module in modules
         ]
@@ -284,7 +284,7 @@ class ModuleIssueViewSet(BaseViewSet):
             ),
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
         module_issue.delete()

@@ -309,7 +309,7 @@ class ModuleIssueViewSet(BaseViewSet):
             ),
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )
         module_issue.delete()
         return Response(status=status.HTTP_204_NO_CONTENT)

@@ -42,6 +42,7 @@ from plane.bgtasks.page_version_task import page_version
 from plane.bgtasks.recent_visited_task import recent_visited_task
+from plane.bgtasks.copy_s3_object import copy_s3_objects


 def unarchive_archive_page_and_descendants(page_id, archived_at):
     # Your SQL query
     sql = """
@@ -198,7 +199,7 @@ class PageViewSet(BaseViewSet):
         project = Project.objects.get(pk=project_id)

         """
-        if the role is guest and guest_view_all_features is false and owned by is not
+        if the role is guest and guest_view_all_features is false and owned by is not
         the requesting user then dont show the page
         """

@@ -572,6 +573,12 @@ class PageDuplicateEndpoint(BaseAPIView):
             pk=page_id, workspace__slug=slug, projects__id=project_id
         ).first()

+        # check for permission
+        if page.access == Page.PRIVATE_ACCESS and page.owned_by_id != request.user.id:
+            return Response(
+                {"error": "Permission denied"}, status=status.HTTP_403_FORBIDDEN
+            )
+
         # get all the project ids where page is present
         project_ids = ProjectPage.objects.filter(page_id=page_id).values_list(
             "project_id", flat=True

@@ -39,6 +39,7 @@ from plane.utils.cache import cache_response
 from plane.bgtasks.webhook_task import model_activity, webhook_activity
 from plane.bgtasks.recent_visited_task import recent_visited_task
 from plane.utils.exception_logger import log_exception
+from plane.utils.host import base_host


 class ProjectViewSet(BaseViewSet):
@@ -177,7 +178,9 @@ class ProjectViewSet(BaseViewSet):
             "module_view",
             "page_view",
             "inbox_view",
+            "guest_view_all_features",
+            "project_lead",
             "network",
             "created_at",
             "updated_at",
             "created_by",
@@ -272,14 +275,14 @@ class ProjectViewSet(BaseViewSet):
         states = [
             {
                 "name": "Backlog",
-                "color": "#A3A3A3",
+                "color": "#60646C",
                 "sequence": 15000,
                 "group": "backlog",
                 "default": True,
             },
             {
                 "name": "Todo",
-                "color": "#3A3A3A",
+                "color": "#60646C",
                 "sequence": 25000,
                 "group": "unstarted",
             },
@@ -291,13 +294,13 @@ class ProjectViewSet(BaseViewSet):
             },
             {
                 "name": "Done",
-                "color": "#16A34A",
+                "color": "#46A758",
                 "sequence": 45000,
                 "group": "completed",
             },
             {
                 "name": "Cancelled",
-                "color": "#EF4444",
+                "color": "#9AA4BC",
                 "sequence": 55000,
                 "group": "cancelled",
             },
@@ -329,7 +332,7 @@ class ProjectViewSet(BaseViewSet):
                 current_instance=None,
                 actor_id=request.user.id,
                 slug=slug,
-                origin=request.META.get("HTTP_ORIGIN"),
+                origin=base_host(request=request, is_app=True),
             )

             serializer = ProjectListSerializer(project)
@@ -339,7 +342,7 @@ class ProjectViewSet(BaseViewSet):
             if "already exists" in str(e):
                 return Response(
                     {"name": "The project name is already taken"},
-                    status=status.HTTP_410_GONE,
+                    status=status.HTTP_409_CONFLICT,
                 )
         except Workspace.DoesNotExist:
             return Response(
@@ -348,7 +351,7 @@ class ProjectViewSet(BaseViewSet):
         except serializers.ValidationError:
             return Response(
                 {"identifier": "The project identifier is already taken"},
-                status=status.HTTP_410_GONE,
+                status=status.HTTP_409_CONFLICT,
             )

     def partial_update(self, request, slug, pk=None):
@@ -407,7 +410,7 @@ class ProjectViewSet(BaseViewSet):
                 current_instance=current_instance,
                 actor_id=request.user.id,
                 slug=slug,
-                origin=request.META.get("HTTP_ORIGIN"),
+                origin=base_host(request=request, is_app=True),
             )
             serializer = ProjectListSerializer(project)
             return Response(serializer.data, status=status.HTTP_200_OK)
@@ -417,7 +420,7 @@ class ProjectViewSet(BaseViewSet):
             if "already exists" in str(e):
                 return Response(
                     {"name": "The project name is already taken"},
-                    status=status.HTTP_410_GONE,
+                    status=status.HTTP_409_CONFLICT,
                 )
         except (Project.DoesNotExist, Workspace.DoesNotExist):
             return Response(
@@ -426,7 +429,7 @@ class ProjectViewSet(BaseViewSet):
         except serializers.ValidationError:
             return Response(
                 {"identifier": "The project identifier is already taken"},
-                status=status.HTTP_410_GONE,
+                status=status.HTTP_409_CONFLICT,
             )

     def destroy(self, request, slug, pk):
@@ -452,7 +455,7 @@ class ProjectViewSet(BaseViewSet):
             new_value=None,
             actor_id=request.user.id,
             slug=slug,
-            current_site=request.META.get("HTTP_ORIGIN"),
+            current_site=base_host(request=request, is_app=True),
             event_id=project.id,
             old_identifier=None,
             new_identifier=None,

@@ -16,18 +16,18 @@ from rest_framework.permissions import AllowAny
 # Module imports
 from .base import BaseViewSet, BaseAPIView
 from plane.app.serializers import ProjectMemberInviteSerializer

 from plane.app.permissions import allow_permission, ROLE

 from plane.db.models import (
     ProjectMember,
     Workspace,
     ProjectMemberInvite,
     User,
     WorkspaceMember,
     Project,
     IssueUserProperty,
 )
-
+from plane.db.models.project import ProjectNetwork
+from plane.utils.host import base_host

 class ProjectInvitationsViewset(BaseViewSet):
     serializer_class = ProjectMemberInviteSerializer
@@ -99,7 +99,7 @@ class ProjectInvitationsViewset(BaseViewSet):
         project_invitations = ProjectMemberInvite.objects.bulk_create(
             project_invitations, batch_size=10, ignore_conflicts=True
         )
-        current_site = request.META.get("HTTP_ORIGIN")
+        current_site = base_host(request=request, is_app=True)

         # Send invitations
         for invitation in project_invitations:
@@ -128,6 +128,7 @@ class UserProjectInvitationsViewset(BaseViewSet):
             .select_related("workspace", "workspace__owner", "project")
         )

+    @allow_permission([ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE")
     def create(self, request, slug):
         project_ids = request.data.get("project_ids", [])

@@ -136,11 +137,20 @@ class UserProjectInvitationsViewset(BaseViewSet):
             member=request.user, workspace__slug=slug, is_active=True
         )

-        if workspace_member.role not in [ROLE.ADMIN.value, ROLE.MEMBER.value]:
-            return Response(
-                {"error": "You do not have permission to join the project"},
-                status=status.HTTP_403_FORBIDDEN,
-            )
+        # Get all the projects
+        projects = Project.objects.filter(
+            id__in=project_ids, workspace__slug=slug
+        ).only("id", "network")
+        # Check if user has permission to join each project
+        for project in projects:
+            if (
+                project.network == ProjectNetwork.SECRET.value
+                and workspace_member.role != ROLE.ADMIN.value
+            ):
+                return Response(
+                    {"error": "Only workspace admins can join private project"},
+                    status=status.HTTP_403_FORBIDDEN,
+                )

         workspace_role = workspace_member.role
         workspace = workspace_member.workspace

@@ -10,11 +10,7 @@ from plane.app.serializers import (
     ProjectMemberRoleSerializer,
 )

-from plane.app.permissions import (
-    ProjectMemberPermission,
-    ProjectLitePermission,
-    WorkspaceUserPermission,
-)
+from plane.app.permissions import WorkspaceUserPermission

 from plane.db.models import Project, ProjectMember, IssueUserProperty, WorkspaceMember
 from plane.bgtasks.project_add_user_email_task import project_add_user_email
@@ -26,14 +22,6 @@ class ProjectMemberViewSet(BaseViewSet):
     serializer_class = ProjectMemberAdminSerializer
     model = ProjectMember

-    def get_permissions(self):
-        if self.action == "leave":
-            self.permission_classes = [ProjectLitePermission]
-        else:
-            self.permission_classes = [ProjectMemberPermission]
-
-        return super(ProjectMemberViewSet, self).get_permissions()
-
     search_fields = ["member__display_name", "member__first_name"]

     def get_queryset(self):
@@ -187,12 +175,20 @@ class ProjectMemberViewSet(BaseViewSet):
         )
         return Response(serializer.data, status=status.HTTP_200_OK)

-    @allow_permission([ROLE.ADMIN])
+    @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
     def partial_update(self, request, slug, project_id, pk):
         project_member = ProjectMember.objects.get(
             pk=pk, workspace__slug=slug, project_id=project_id, is_active=True
         )
-        if request.user.id == project_member.member_id:
+
+        # Fetch the workspace role of the project member
+        workspace_role = WorkspaceMember.objects.get(
+            workspace__slug=slug, member=project_member.member, is_active=True
+        ).role
+        is_workspace_admin = workspace_role == ROLE.ADMIN.value
+
+        # Check if the user is not editing their own role if they are not an admin
+        if request.user.id == project_member.member_id and not is_workspace_admin:
             return Response(
                 {"error": "You cannot update your own role"},
                 status=status.HTTP_400_BAD_REQUEST,
@@ -205,9 +201,6 @@ class ProjectMemberViewSet(BaseViewSet):
             is_active=True,
         )

-        workspace_role = WorkspaceMember.objects.get(
-            workspace__slug=slug, member=project_member.member, is_active=True
-        ).role
         if workspace_role in [5] and int(
             request.data.get("role", project_member.role)
         ) in [15, 20]:
@@ -222,6 +215,7 @@ class ProjectMemberViewSet(BaseViewSet):
             "role" in request.data
             and int(request.data.get("role", project_member.role))
             > requested_project_member.role
+            and not is_workspace_admin
         ):
             return Response(
                 {"error": "You cannot update a role that is higher than your own role"},

@@ -1,5 +1,5 @@
 # Django imports
-from django.db.models import Q
+from django.db.models import Q, QuerySet

 # Third party imports
 from rest_framework import status
@@ -12,6 +12,95 @@ from plane.utils.issue_search import search_issues


 class IssueSearchEndpoint(BaseAPIView):
+    def filter_issues_by_project(self, project_id: int, issues: QuerySet) -> QuerySet:
+        """
+        Filter issues by project
+        """
+
+        issues = issues.filter(project_id=project_id)
+
+        return issues
+
+    def search_issues_by_query(self, query: str, issues: QuerySet) -> QuerySet:
+        """
+        Search issues by query
+        """
+
+        issues = search_issues(query, issues)
+
+        return issues
+
+    def search_issues_and_excluding_parent(
+        self, issues: QuerySet, issue_id: str
+    ) -> QuerySet:
+        """
+        Search issues and epics by query excluding the parent
+        """
+
+        issue = Issue.issue_objects.filter(pk=issue_id).first()
+        if issue:
+            issues = issues.filter(
+                ~Q(pk=issue_id), ~Q(pk=issue.parent_id), ~Q(parent_id=issue_id)
+            )
+        return issues
+
+    def filter_issues_excluding_related_issues(
+        self, issue_id: str, issues: QuerySet
+    ) -> QuerySet:
+        """
+        Filter issues excluding related issues
+        """
+
+        issue = Issue.issue_objects.filter(pk=issue_id).first()
+        related_issue_ids = (
+            IssueRelation.objects.filter(Q(related_issue=issue) | Q(issue=issue))
+            .values_list("issue_id", "related_issue_id")
+            .distinct()
+        )
+
+        related_issue_ids = [item for sublist in related_issue_ids for item in sublist]
+
+        if issue:
+            issues = issues.filter(~Q(pk=issue_id), ~Q(pk__in=related_issue_ids))
+
+        return issues
+
+    def filter_root_issues_only(self, issue_id: str, issues: QuerySet) -> QuerySet:
+        """
+        Filter root issues only
+        """
+        issue = Issue.issue_objects.filter(pk=issue_id).first()
+        if issue:
+            issues = issues.filter(~Q(pk=issue_id), parent__isnull=True)
+            if issue.parent:
+                issues = issues.filter(~Q(pk=issue.parent_id))
+        return issues
+
+    def exclude_issues_in_cycles(self, issues: QuerySet) -> QuerySet:
+        """
+        Exclude issues in cycles
+        """
+        issues = issues.exclude(
+            Q(issue_cycle__isnull=False) & Q(issue_cycle__deleted_at__isnull=True)
+        )
+        return issues
+
+    def exclude_issues_in_module(self, issues: QuerySet, module: str) -> QuerySet:
+        """
+        Exclude issues in a module
+        """
+        issues = issues.exclude(
+            Q(issue_module__module=module) & Q(issue_module__deleted_at__isnull=True)
+        )
+        return issues
+
+    def filter_issues_without_target_date(self, issues: QuerySet) -> QuerySet:
+        """
+        Filter issues without a target date
+        """
+        issues = issues.filter(target_date__isnull=True)
+        return issues
+
     def get(self, request, slug, project_id):
         query = request.query_params.get("search", False)
         workspace_search = request.query_params.get("workspace_search", "false")
@@ -21,7 +110,6 @@ class IssueSearchEndpoint(BaseAPIView):
         module = request.query_params.get("module", False)
         sub_issue = request.query_params.get("sub_issue", "false")
         target_date = request.query_params.get("target_date", True)
-
         issue_id = request.query_params.get("issue_id", False)

         issues = Issue.issue_objects.filter(
@@ -32,52 +120,28 @@ class IssueSearchEndpoint(BaseAPIView):
         )

         if workspace_search == "false":
-            issues = issues.filter(project_id=project_id)
+            issues = self.filter_issues_by_project(project_id, issues)

         if query:
-            issues = search_issues(query, issues)
+            issues = self.search_issues_by_query(query, issues)

         if parent == "true" and issue_id:
-            issue = Issue.issue_objects.filter(pk=issue_id).first()
-            if issue:
-                issues = issues.filter(
-                    ~Q(pk=issue_id), ~Q(pk=issue.parent_id), ~Q(parent_id=issue_id)
-                )
+            issues = self.search_issues_and_excluding_parent(issues, issue_id)

         if issue_relation == "true" and issue_id:
-            issue = Issue.issue_objects.filter(pk=issue_id).first()
-            related_issue_ids = IssueRelation.objects.filter(
-                Q(related_issue=issue) | Q(issue=issue)
-            ).values_list(
-                "issue_id", "related_issue_id"
-            ).distinct()
-
-            related_issue_ids = [item for sublist in related_issue_ids for item in sublist]
-
-            if issue:
-                issues = issues.filter(
-                    ~Q(pk=issue_id),
-                    ~Q(pk__in=related_issue_ids),
-                )
+            issues = self.filter_issues_excluding_related_issues(issue_id, issues)

         if sub_issue == "true" and issue_id:
-            issue = Issue.issue_objects.filter(pk=issue_id).first()
-            if issue:
-                issues = issues.filter(~Q(pk=issue_id), parent__isnull=True)
-                if issue.parent:
-                    issues = issues.filter(~Q(pk=issue.parent_id))
+            issues = self.filter_root_issues_only(issue_id, issues)

         if cycle == "true":
-            issues = issues.exclude(
-                Q(issue_cycle__isnull=False) & Q(issue_cycle__deleted_at__isnull=True)
-            )
+            issues = self.exclude_issues_in_cycles(issues)

         if module:
-            issues = issues.exclude(
-                Q(issue_module__module=module)
-                & Q(issue_module__deleted_at__isnull=True)
-            )
+            issues = self.exclude_issues_in_module(issues, module)

         if target_date == "none":
-            issues = issues.filter(target_date__isnull=True)
+            issues = self.filter_issues_without_target_date(issues)

         if ProjectMember.objects.filter(
             project_id=project_id, member=self.request.user, is_active=True, role=5

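Every extracted helper shares one contract: QuerySet in, QuerySet out. Django keeps the query lazy, so stacking several helpers still issues a single SELECT when the result is finally iterated. A runnable stand-in for that contract, with generators playing the role of lazy querysets:

def filter_by_project(project_id, issues):
    # QuerySet-in, QuerySet-out: nothing is evaluated here.
    return (i for i in issues if i["project_id"] == project_id)


def without_target_date(issues):
    return (i for i in issues if i["target_date"] is None)


issues = [
    {"id": 1, "project_id": 7, "target_date": None},
    {"id": 2, "project_id": 7, "target_date": "2025-06-01"},
    {"id": 3, "project_id": 9, "target_date": None},
]

result = without_target_date(filter_by_project(7, issues))
assert [i["id"] for i in result] == [1]  # evaluated only at this point
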
@@ -1,5 +1,6 @@
 # Python imports
 from itertools import groupby
+from collections import defaultdict

 # Django imports
 from django.db.utils import IntegrityError
@@ -74,7 +75,19 @@ class StateViewSet(BaseViewSet):
     @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
     def list(self, request, slug, project_id):
         states = StateSerializer(self.get_queryset(), many=True).data

+        grouped_states = defaultdict(list)
+        for state in states:
+            grouped_states[state["group"]].append(state)
+
+        for group, group_states in grouped_states.items():
+            count = len(group_states)
+
+            for index, state in enumerate(group_states, start=1):
+                state["order"] = index / count
+
         grouped = request.GET.get("grouped", False)

         if grouped == "true":
             state_dict = {}
             for key, value in groupby(
@@ -83,6 +96,7 @@ class StateViewSet(BaseViewSet):
             ):
                 state_dict[str(key)] = list(value)
             return Response(state_dict, status=status.HTTP_200_OK)
+
         return Response(states, status=status.HTTP_200_OK)

     @invalidate_cache(path="workspaces/:slug/states/", url_params=True, user=False)

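The new block gives every state a normalized `order` within its group: states in a group are numbered 1..n and divided by n, so values always land in (0, 1] regardless of group size. A sketch mirroring the loop above:

from collections import defaultdict

states = [
    {"name": "Todo", "group": "unstarted"},
    {"name": "Backlog 1", "group": "backlog"},
    {"name": "Backlog 2", "group": "backlog"},
    {"name": "Backlog 3", "group": "backlog"},
]

grouped = defaultdict(list)
for state in states:
    grouped[state["group"]].append(state)

for group, group_states in grouped.items():
    count = len(group_states)
    for index, state in enumerate(group_states, start=1):
        state["order"] = index / count

# backlog gets 1/3, 2/3, 3/3; the lone unstarted state gets 1.0
assert [s["order"] for s in grouped["backlog"]] == [1 / 3, 2 / 3, 1.0]
assert grouped["unstarted"][0]["order"] == 1.0
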
@@ -21,221 +21,161 @@ class TimezoneEndpoint(APIView):

     throttle_classes = [AuthenticationThrottle]

-    @method_decorator(cache_page(60 * 60 * 24))
+    @method_decorator(cache_page(60 * 60 * 2))
     def get(self, request):
-        timezone_mapping = {
-            "-1100": [
-                ("Midway Island", "Pacific/Midway"),
-                ("American Samoa", "Pacific/Pago_Pago"),
-            ],
-            "-1000": [
-                ("Hawaii", "Pacific/Honolulu"),
-                ("Aleutian Islands", "America/Adak"),
-            ],
-            "-0930": [("Marquesas Islands", "Pacific/Marquesas")],
-            "-0900": [
-                ("Alaska", "America/Anchorage"),
-                ("Gambier Islands", "Pacific/Gambier"),
-            ],
-            "-0800": [
-                ("Pacific Time (US and Canada)", "America/Los_Angeles"),
-                ("Baja California", "America/Tijuana"),
-            ],
-            "-0700": [
-                ("Mountain Time (US and Canada)", "America/Denver"),
-                ("Arizona", "America/Phoenix"),
-                ("Chihuahua, Mazatlan", "America/Chihuahua"),
-            ],
-            "-0600": [
-                ("Central Time (US and Canada)", "America/Chicago"),
-                ("Saskatchewan", "America/Regina"),
-                ("Guadalajara, Mexico City, Monterrey", "America/Mexico_City"),
-                ("Tegucigalpa, Honduras", "America/Tegucigalpa"),
-                ("Costa Rica", "America/Costa_Rica"),
-            ],
-            "-0500": [
-                ("Eastern Time (US and Canada)", "America/New_York"),
-                ("Lima", "America/Lima"),
-                ("Bogota", "America/Bogota"),
-                ("Quito", "America/Guayaquil"),
-                ("Chetumal", "America/Cancun"),
-            ],
-            "-0430": [("Caracas (Old Venezuela Time)", "America/Caracas")],
-            "-0400": [
-                ("Atlantic Time (Canada)", "America/Halifax"),
-                ("Caracas", "America/Caracas"),
-                ("Santiago", "America/Santiago"),
-                ("La Paz", "America/La_Paz"),
-                ("Manaus", "America/Manaus"),
-                ("Georgetown", "America/Guyana"),
-                ("Bermuda", "Atlantic/Bermuda"),
-            ],
-            "-0330": [("Newfoundland Time (Canada)", "America/St_Johns")],
-            "-0300": [
-                ("Buenos Aires", "America/Argentina/Buenos_Aires"),
-                ("Brasilia", "America/Sao_Paulo"),
-                ("Greenland", "America/Godthab"),
-                ("Montevideo", "America/Montevideo"),
-                ("Falkland Islands", "Atlantic/Stanley"),
-            ],
-            "-0200": [
-                (
-                    "South Georgia and the South Sandwich Islands",
-                    "Atlantic/South_Georgia",
-                )
-            ],
-            "-0100": [
-                ("Azores", "Atlantic/Azores"),
-                ("Cape Verde Islands", "Atlantic/Cape_Verde"),
-            ],
-            "+0000": [
-                ("Dublin", "Europe/Dublin"),
-                ("Reykjavik", "Atlantic/Reykjavik"),
-                ("Lisbon", "Europe/Lisbon"),
-                ("Monrovia", "Africa/Monrovia"),
-                ("Casablanca", "Africa/Casablanca"),
-            ],
-            "+0100": [
-                ("Central European Time (Berlin, Rome, Paris)", "Europe/Paris"),
-                ("West Central Africa", "Africa/Lagos"),
-                ("Algiers", "Africa/Algiers"),
-                ("Lagos", "Africa/Lagos"),
-                ("Tunis", "Africa/Tunis"),
-            ],
-            "+0200": [
-                ("Eastern European Time (Cairo, Helsinki, Kyiv)", "Europe/Kiev"),
-                ("Athens", "Europe/Athens"),
-                ("Jerusalem", "Asia/Jerusalem"),
-                ("Johannesburg", "Africa/Johannesburg"),
-                ("Harare, Pretoria", "Africa/Harare"),
-            ],
-            "+0300": [
-                ("Moscow Time", "Europe/Moscow"),
-                ("Baghdad", "Asia/Baghdad"),
-                ("Nairobi", "Africa/Nairobi"),
-                ("Kuwait, Riyadh", "Asia/Riyadh"),
-            ],
-            "+0330": [("Tehran", "Asia/Tehran")],
-            "+0400": [
-                ("Abu Dhabi", "Asia/Dubai"),
-                ("Baku", "Asia/Baku"),
-                ("Yerevan", "Asia/Yerevan"),
-                ("Astrakhan", "Europe/Astrakhan"),
-                ("Tbilisi", "Asia/Tbilisi"),
-                ("Mauritius", "Indian/Mauritius"),
-            ],
-            "+0500": [
-                ("Islamabad", "Asia/Karachi"),
-                ("Karachi", "Asia/Karachi"),
-                ("Tashkent", "Asia/Tashkent"),
-                ("Yekaterinburg", "Asia/Yekaterinburg"),
-                ("Maldives", "Indian/Maldives"),
-                ("Chagos", "Indian/Chagos"),
-            ],
-            "+0530": [
-                ("Chennai", "Asia/Kolkata"),
-                ("Kolkata", "Asia/Kolkata"),
-                ("Mumbai", "Asia/Kolkata"),
-                ("New Delhi", "Asia/Kolkata"),
-                ("Sri Jayawardenepura", "Asia/Colombo"),
-            ],
-            "+0545": [("Kathmandu", "Asia/Kathmandu")],
-            "+0600": [
-                ("Dhaka", "Asia/Dhaka"),
-                ("Almaty", "Asia/Almaty"),
-                ("Bishkek", "Asia/Bishkek"),
-                ("Thimphu", "Asia/Thimphu"),
-            ],
-            "+0630": [
-                ("Yangon (Rangoon)", "Asia/Yangon"),
-                ("Cocos Islands", "Indian/Cocos"),
-            ],
-            "+0700": [
-                ("Bangkok", "Asia/Bangkok"),
-                ("Hanoi", "Asia/Ho_Chi_Minh"),
-                ("Jakarta", "Asia/Jakarta"),
-                ("Novosibirsk", "Asia/Novosibirsk"),
-                ("Krasnoyarsk", "Asia/Krasnoyarsk"),
-            ],
-            "+0800": [
-                ("Beijing", "Asia/Shanghai"),
-                ("Singapore", "Asia/Singapore"),
-                ("Perth", "Australia/Perth"),
-                ("Hong Kong", "Asia/Hong_Kong"),
-                ("Ulaanbaatar", "Asia/Ulaanbaatar"),
-                ("Palau", "Pacific/Palau"),
-            ],
-            "+0845": [("Eucla", "Australia/Eucla")],
-            "+0900": [
-                ("Tokyo", "Asia/Tokyo"),
-                ("Seoul", "Asia/Seoul"),
-                ("Yakutsk", "Asia/Yakutsk"),
-            ],
-            "+0930": [
-                ("Adelaide", "Australia/Adelaide"),
-                ("Darwin", "Australia/Darwin"),
-            ],
-            "+1000": [
-                ("Sydney", "Australia/Sydney"),
-                ("Brisbane", "Australia/Brisbane"),
-                ("Guam", "Pacific/Guam"),
-                ("Vladivostok", "Asia/Vladivostok"),
-                ("Tahiti", "Pacific/Tahiti"),
-            ],
-            "+1030": [("Lord Howe Island", "Australia/Lord_Howe")],
-            "+1100": [
-                ("Solomon Islands", "Pacific/Guadalcanal"),
-                ("Magadan", "Asia/Magadan"),
-                ("Norfolk Island", "Pacific/Norfolk"),
-                ("Bougainville Island", "Pacific/Bougainville"),
-                ("Chokurdakh", "Asia/Srednekolymsk"),
-            ],
-            "+1200": [
-                ("Auckland", "Pacific/Auckland"),
-                ("Wellington", "Pacific/Auckland"),
-                ("Fiji Islands", "Pacific/Fiji"),
-                ("Anadyr", "Asia/Anadyr"),
-            ],
-            "+1245": [("Chatham Islands", "Pacific/Chatham")],
-            "+1300": [("Nuku'alofa", "Pacific/Tongatapu"), ("Samoa", "Pacific/Apia")],
-            "+1400": [("Kiritimati Island", "Pacific/Kiritimati")],
-        }
+        timezone_locations = [
+            ('Midway Island', 'Pacific/Midway'),  # UTC-11:00
+            ('American Samoa', 'Pacific/Pago_Pago'),  # UTC-11:00
+            ('Hawaii', 'Pacific/Honolulu'),  # UTC-10:00
+            ('Aleutian Islands', 'America/Adak'),  # UTC-10:00 (DST: UTC-09:00)
+            ('Marquesas Islands', 'Pacific/Marquesas'),  # UTC-09:30
+            ('Alaska', 'America/Anchorage'),  # UTC-09:00 (DST: UTC-08:00)
+            ('Gambier Islands', 'Pacific/Gambier'),  # UTC-09:00
+            ('Pacific Time (US and Canada)', 'America/Los_Angeles'),  # UTC-08:00 (DST: UTC-07:00)
+            ('Baja California', 'America/Tijuana'),  # UTC-08:00 (DST: UTC-07:00)
+            ('Mountain Time (US and Canada)', 'America/Denver'),  # UTC-07:00 (DST: UTC-06:00)
+            ('Arizona', 'America/Phoenix'),  # UTC-07:00
+            ('Chihuahua, Mazatlan', 'America/Chihuahua'),  # UTC-07:00 (DST: UTC-06:00)
+            ('Central Time (US and Canada)', 'America/Chicago'),  # UTC-06:00 (DST: UTC-05:00)
+            ('Saskatchewan', 'America/Regina'),  # UTC-06:00
+            ('Guadalajara, Mexico City, Monterrey', 'America/Mexico_City'),  # UTC-06:00 (DST: UTC-05:00)
+            ('Tegucigalpa, Honduras', 'America/Tegucigalpa'),  # UTC-06:00
+            ('Costa Rica', 'America/Costa_Rica'),  # UTC-06:00
+            ('Eastern Time (US and Canada)', 'America/New_York'),  # UTC-05:00 (DST: UTC-04:00)
+            ('Lima', 'America/Lima'),  # UTC-05:00
+            ('Bogota', 'America/Bogota'),  # UTC-05:00
+            ('Quito', 'America/Guayaquil'),  # UTC-05:00
+            ('Chetumal', 'America/Cancun'),  # UTC-05:00 (DST: UTC-04:00)
+            ('Caracas (Old Venezuela Time)', 'America/Caracas'),  # UTC-04:30
+            ('Atlantic Time (Canada)', 'America/Halifax'),  # UTC-04:00 (DST: UTC-03:00)
+            ('Caracas', 'America/Caracas'),  # UTC-04:00
+            ('Santiago', 'America/Santiago'),  # UTC-04:00 (DST: UTC-03:00)
+            ('La Paz', 'America/La_Paz'),  # UTC-04:00
+            ('Manaus', 'America/Manaus'),  # UTC-04:00
+            ('Georgetown', 'America/Guyana'),  # UTC-04:00
+            ('Bermuda', 'Atlantic/Bermuda'),  # UTC-04:00 (DST: UTC-03:00)
+            ('Newfoundland Time (Canada)', 'America/St_Johns'),  # UTC-03:30 (DST: UTC-02:30)
+            ('Buenos Aires', 'America/Argentina/Buenos_Aires'),  # UTC-03:00
+            ('Brasilia', 'America/Sao_Paulo'),  # UTC-03:00
+            ('Greenland', 'America/Godthab'),  # UTC-03:00 (DST: UTC-02:00)
+            ('Montevideo', 'America/Montevideo'),  # UTC-03:00
+            ('Falkland Islands', 'Atlantic/Stanley'),  # UTC-03:00
+            ('South Georgia and the South Sandwich Islands', 'Atlantic/South_Georgia'),  # UTC-02:00
+            ('Azores', 'Atlantic/Azores'),  # UTC-01:00 (DST: UTC+00:00)
+            ('Cape Verde Islands', 'Atlantic/Cape_Verde'),  # UTC-01:00
+            ('Dublin', 'Europe/Dublin'),  # UTC+00:00 (DST: UTC+01:00)
+            ('Reykjavik', 'Atlantic/Reykjavik'),  # UTC+00:00
+            ('Lisbon', 'Europe/Lisbon'),  # UTC+00:00 (DST: UTC+01:00)
+            ('Monrovia', 'Africa/Monrovia'),  # UTC+00:00
+            ('Casablanca', 'Africa/Casablanca'),  # UTC+00:00 (DST: UTC+01:00)
+            ('Central European Time (Berlin, Rome, Paris)', 'Europe/Paris'),  # UTC+01:00 (DST: UTC+02:00)
+            ('West Central Africa', 'Africa/Lagos'),  # UTC+01:00
+            ('Algiers', 'Africa/Algiers'),  # UTC+01:00
+            ('Lagos', 'Africa/Lagos'),  # UTC+01:00
+            ('Tunis', 'Africa/Tunis'),  # UTC+01:00
+            ('Eastern European Time (Cairo, Helsinki, Kyiv)', 'Europe/Kiev'),  # UTC+02:00 (DST: UTC+03:00)
+            ('Athens', 'Europe/Athens'),  # UTC+02:00 (DST: UTC+03:00)
+            ('Jerusalem', 'Asia/Jerusalem'),  # UTC+02:00 (DST: UTC+03:00)
+            ('Johannesburg', 'Africa/Johannesburg'),  # UTC+02:00
+            ('Harare, Pretoria', 'Africa/Harare'),  # UTC+02:00
+            ('Moscow Time', 'Europe/Moscow'),  # UTC+03:00
+            ('Baghdad', 'Asia/Baghdad'),  # UTC+03:00
+            ('Nairobi', 'Africa/Nairobi'),  # UTC+03:00
+            ('Kuwait, Riyadh', 'Asia/Riyadh'),  # UTC+03:00
+            ('Tehran', 'Asia/Tehran'),  # UTC+03:30 (DST: UTC+04:30)
+            ('Abu Dhabi', 'Asia/Dubai'),  # UTC+04:00
+            ('Baku', 'Asia/Baku'),  # UTC+04:00 (DST: UTC+05:00)
+            ('Yerevan', 'Asia/Yerevan'),  # UTC+04:00 (DST: UTC+05:00)
+            ('Astrakhan', 'Europe/Astrakhan'),  # UTC+04:00
+            ('Tbilisi', 'Asia/Tbilisi'),  # UTC+04:00
+            ('Mauritius', 'Indian/Mauritius'),  # UTC+04:00
+            ('Islamabad', 'Asia/Karachi'),  # UTC+05:00
+            ('Karachi', 'Asia/Karachi'),  # UTC+05:00
+            ('Tashkent', 'Asia/Tashkent'),  # UTC+05:00
+            ('Yekaterinburg', 'Asia/Yekaterinburg'),  # UTC+05:00
+            ('Maldives', 'Indian/Maldives'),  # UTC+05:00
+            ('Chagos', 'Indian/Chagos'),  # UTC+05:00
+            ('Chennai', 'Asia/Kolkata'),  # UTC+05:30
+            ('Kolkata', 'Asia/Kolkata'),  # UTC+05:30
+            ('Mumbai', 'Asia/Kolkata'),  # UTC+05:30
+            ('New Delhi', 'Asia/Kolkata'),  # UTC+05:30
+            ('Sri Jayawardenepura', 'Asia/Colombo'),  # UTC+05:30
+            ('Kathmandu', 'Asia/Kathmandu'),  # UTC+05:45
+            ('Dhaka', 'Asia/Dhaka'),  # UTC+06:00
+            ('Almaty', 'Asia/Almaty'),  # UTC+06:00
+            ('Bishkek', 'Asia/Bishkek'),  # UTC+06:00
+            ('Thimphu', 'Asia/Thimphu'),  # UTC+06:00
+            ('Yangon (Rangoon)', 'Asia/Yangon'),  # UTC+06:30
+            ('Cocos Islands', 'Indian/Cocos'),  # UTC+06:30
+            ('Bangkok', 'Asia/Bangkok'),  # UTC+07:00
+            ('Hanoi', 'Asia/Ho_Chi_Minh'),  # UTC+07:00
+            ('Jakarta', 'Asia/Jakarta'),  # UTC+07:00
+            ('Novosibirsk', 'Asia/Novosibirsk'),  # UTC+07:00
+            ('Krasnoyarsk', 'Asia/Krasnoyarsk'),  # UTC+07:00
+            ('Beijing', 'Asia/Shanghai'),  # UTC+08:00
+            ('Singapore', 'Asia/Singapore'),  # UTC+08:00
+            ('Perth', 'Australia/Perth'),  # UTC+08:00
+            ('Hong Kong', 'Asia/Hong_Kong'),  # UTC+08:00
+            ('Ulaanbaatar', 'Asia/Ulaanbaatar'),  # UTC+08:00
+            ('Palau', 'Pacific/Palau'),  # UTC+08:00
+            ('Eucla', 'Australia/Eucla'),  # UTC+08:45
+            ('Tokyo', 'Asia/Tokyo'),  # UTC+09:00
+            ('Seoul', 'Asia/Seoul'),  # UTC+09:00
+            ('Yakutsk', 'Asia/Yakutsk'),  # UTC+09:00
+            ('Adelaide', 'Australia/Adelaide'),  # UTC+09:30 (DST: UTC+10:30)
+            ('Darwin', 'Australia/Darwin'),  # UTC+09:30
+            ('Sydney', 'Australia/Sydney'),  # UTC+10:00 (DST: UTC+11:00)
+            ('Brisbane', 'Australia/Brisbane'),  # UTC+10:00
+            ('Guam', 'Pacific/Guam'),  # UTC+10:00
+            ('Vladivostok', 'Asia/Vladivostok'),  # UTC+10:00
+            ('Tahiti', 'Pacific/Tahiti'),  # UTC+10:00
+            ('Lord Howe Island', 'Australia/Lord_Howe'),  # UTC+10:30 (DST: UTC+11:00)
+            ('Solomon Islands', 'Pacific/Guadalcanal'),  # UTC+11:00
+            ('Magadan', 'Asia/Magadan'),  # UTC+11:00
+            ('Norfolk Island', 'Pacific/Norfolk'),  # UTC+11:00
+            ('Bougainville Island', 'Pacific/Bougainville'),  # UTC+11:00
+            ('Chokurdakh', 'Asia/Srednekolymsk'),  # UTC+11:00
+            ('Auckland', 'Pacific/Auckland'),  # UTC+12:00 (DST: UTC+13:00)
+            ('Wellington', 'Pacific/Auckland'),  # UTC+12:00 (DST: UTC+13:00)
+            ('Fiji Islands', 'Pacific/Fiji'),  # UTC+12:00 (DST: UTC+13:00)
+            ('Anadyr', 'Asia/Anadyr'),  # UTC+12:00
+            ('Chatham Islands', 'Pacific/Chatham'),  # UTC+12:45 (DST: UTC+13:45)
+            ("Nuku'alofa", 'Pacific/Tongatapu'),  # UTC+13:00
+            ('Samoa', 'Pacific/Apia'),  # UTC+13:00 (DST: UTC+14:00)
+            ('Kiritimati Island', 'Pacific/Kiritimati')  # UTC+14:00
+        ]

         timezone_list = []
         now = datetime.now()

-        # Process timezone mapping
-        for offset, locations in timezone_mapping.items():
-            sign = "-" if offset.startswith("-") else "+"
-            hours = offset[1:3]
-            minutes = offset[3:] if len(offset) > 3 else "00"
-
-            for friendly_name, tz_identifier in locations:
-                try:
-                    tz = pytz.timezone(tz_identifier)
-                    current_offset = now.astimezone(tz).strftime("%z")
+        for friendly_name, tz_identifier in timezone_locations:
+            try:
+                tz = pytz.timezone(tz_identifier)
+                current_offset = now.astimezone(tz).strftime("%z")

-                    # converting and formatting UTC offset to GMT offset
-                    current_utc_offset = now.astimezone(tz).utcoffset()
-                    total_seconds = int(current_utc_offset.total_seconds())
-                    hours_offset = total_seconds // 3600
-                    minutes_offset = abs(total_seconds % 3600) // 60
-                    gmt_offset = (
-                        f"GMT{'+' if hours_offset >= 0 else '-'}"
-                        f"{abs(hours_offset):02}:{minutes_offset:02}"
-                    )
+                # converting and formatting UTC offset to GMT offset
+                current_utc_offset = now.astimezone(tz).utcoffset()
+                total_seconds = int(current_utc_offset.total_seconds())
+                hours_offset = total_seconds // 3600
+                minutes_offset = abs(total_seconds % 3600) // 60
+                offset = (
+                    f"{'+' if hours_offset >= 0 else '-'}"
+                    f"{abs(hours_offset):02}:{minutes_offset:02}"
+                )

-                    timezone_value = {
-                        "offset": int(current_offset),
-                        "utc_offset": f"UTC{sign}{hours}:{minutes}",
-                        "gmt_offset": gmt_offset,
-                        "value": tz_identifier,
-                        "label": f"{friendly_name}",
-                    }
+                timezone_value = {
+                    "offset": int(current_offset),
+                    "utc_offset": f"UTC{offset}",
+                    "gmt_offset": f"GMT{offset}",
+                    "value": tz_identifier,
+                    "label": f"{friendly_name}",
+                }

-                    timezone_list.append(timezone_value)
-                except pytz.exceptions.UnknownTimeZoneError:
-                    continue
+                timezone_list.append(timezone_value)
+            except pytz.exceptions.UnknownTimeZoneError:
+                continue

         # Sort by offset and then by label
         timezone_list.sort(key=lambda x: (x["offset"], x["label"]))

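The rewrite derives both `utc_offset` and `gmt_offset` from the live `utcoffset()` instead of hard-coded bucket keys, so daylight saving is reflected automatically. One caveat worth noting: floor division on a negative `total_seconds` skews half-hour offsets, so the `//` and `%` pair above renders UTC-03:30 as `-04:30`. A sign-first variant avoids that; the helper below is a sketch, not code from this changeset:

def format_offset(total_seconds: int) -> str:
    # Work on the magnitude, then re-apply the sign: avoids Python's
    # floor-division surprise for negative, non-whole-hour offsets.
    sign = "+" if total_seconds >= 0 else "-"
    hours, remainder = divmod(abs(total_seconds), 3600)
    minutes = remainder // 60
    return f"{sign}{hours:02}:{minutes:02}"

assert format_offset(19800) == "+05:30"    # Asia/Kolkata
assert format_offset(-12600) == "-03:30"   # Newfoundland standard time
# For comparison, the hunk's arithmetic yields -04:30 for -12600 seconds.
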
@@ -117,7 +117,7 @@ class WorkspaceViewViewSet(BaseViewSet):
         return Response(serializer.data, status=status.HTTP_200_OK)

     @allow_permission(
-        allowed_roles=[], level="WORKSPACE", creator=True, model=IssueView
+        allowed_roles=[ROLE.ADMIN], level="WORKSPACE", creator=True, model=IssueView
     )
     def destroy(self, request, slug, pk):
         workspace_view = IssueView.objects.get(pk=pk, workspace__slug=slug)
@@ -432,7 +432,7 @@ class IssueViewViewSet(BaseViewSet):
         ):
             return Response(
                 {"error": "You are not allowed to view this issue"},
-                status=status.HTTP_400_BAD_REQUEST,
+                status=status.HTTP_403_FORBIDDEN,
             )

         serializer = IssueViewSerializer(issue_view)

@@ -29,7 +29,7 @@ class WebhookEndpoint(BaseAPIView):
             if "already exists" in str(e):
                 return Response(
                     {"error": "URL already exists for the workspace"},
-                    status=status.HTTP_410_GONE,
+                    status=status.HTTP_409_CONFLICT,
                 )
             raise IntegrityError


@@ -42,6 +42,7 @@ from django.views.decorators.cache import cache_control
 from django.views.decorators.vary import vary_on_cookie
 from plane.utils.constants import RESTRICTED_WORKSPACE_SLUGS
 from plane.license.utils.instance_value import get_configuration_value
+from plane.bgtasks.workspace_seed_task import workspace_seed


 class WorkSpaceViewSet(BaseViewSet):
@@ -119,11 +120,15 @@ class WorkSpaceViewSet(BaseViewSet):
             )

             # Get total members and role
-            total_members=WorkspaceMember.objects.filter(workspace_id=serializer.data["id"]).count()
+            total_members = WorkspaceMember.objects.filter(
+                workspace_id=serializer.data["id"]
+            ).count()
             data = serializer.data
             data["total_members"] = total_members
             data["role"] = 20

+            workspace_seed.delay(serializer.data["id"])
+
             return Response(data, status=status.HTTP_201_CREATED)
         return Response(
             [serializer.errors[error][0] for error in serializer.errors],
@@ -134,7 +139,7 @@ class WorkSpaceViewSet(BaseViewSet):
         if "already exists" in str(e):
             return Response(
                 {"slug": "The workspace with the slug already exists"},
-                status=status.HTTP_410_GONE,
+                status=status.HTTP_409_CONFLICT,
             )

     @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST], level="WORKSPACE")
@@ -167,10 +172,9 @@ class UserWorkSpacesEndpoint(BaseAPIView):
             .values("count")
         )

-        role = (
-            WorkspaceMember.objects.filter(workspace=OuterRef("id"), member=request.user, is_active=True)
-            .values("role")
-        )
+        role = WorkspaceMember.objects.filter(
+            workspace=OuterRef("id"), member=request.user, is_active=True
+        ).values("role")

         workspace = (
             Workspace.objects.prefetch_related(

@@ -29,6 +29,7 @@ class WorkspaceCyclesEndpoint(BaseAPIView):
                     issue_cycle__issue__archived_at__isnull=True,
                     issue_cycle__issue__is_draft=False,
+                    issue_cycle__deleted_at__isnull=True,
                     issue_cycle__issue__deleted_at__isnull=True,
                 ),
             )
         )

@@ -36,7 +36,7 @@ from plane.db.models import (
 from .. import BaseViewSet
 from plane.bgtasks.issue_activities_task import issue_activity
 from plane.utils.issue_filters import issue_filters
-
+from plane.utils.host import base_host

 class WorkspaceDraftIssueViewSet(BaseViewSet):
     model = DraftIssue
@@ -241,7 +241,7 @@ class WorkspaceDraftIssueViewSet(BaseViewSet):
             current_instance=None,
             epoch=int(timezone.now().timestamp()),
             notification=True,
-            origin=request.META.get("HTTP_ORIGIN"),
+            origin=base_host(request=request, is_app=True),
         )

         if request.data.get("cycle_id", None):
@@ -270,7 +270,7 @@ class WorkspaceDraftIssueViewSet(BaseViewSet):
                 ),
                 epoch=int(timezone.now().timestamp()),
                 notification=True,
-                origin=request.META.get("HTTP_ORIGIN"),
+                origin=base_host(request=request, is_app=True),
             )

         if request.data.get("module_ids", []):
@@ -300,7 +300,7 @@ class WorkspaceDraftIssueViewSet(BaseViewSet):
                 current_instance=None,
                 epoch=int(timezone.now().timestamp()),
                 notification=True,
-                origin=request.META.get("HTTP_ORIGIN"),
+                origin=base_host(request=request, is_app=True),
             )
             for module in request.data.get("module_ids", [])
         ]

@@ -34,6 +34,22 @@ class WorkspaceFavoriteEndpoint(BaseAPIView):
     def post(self, request, slug):
         try:
             workspace = Workspace.objects.get(slug=slug)
+
+            # If the favorite exists return
+            if request.data.get("entity_identifier"):
+                user_favorites = UserFavorite.objects.filter(
+                    workspace=workspace,
+                    user_id=request.user.id,
+                    entity_type=request.data.get("entity_type"),
+                    entity_identifier=request.data.get("entity_identifier"),
+                ).first()
+
+                # If the favorite exists return
+                if user_favorites:
+                    serializer = UserFavoriteSerializer(user_favorites)
+                    return Response(serializer.data, status=status.HTTP_200_OK)
+
+            # else create a new favorite
             serializer = UserFavoriteSerializer(data=request.data)
             if serializer.is_valid():
                 serializer.save(
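The added lookup makes the favorite POST idempotent: re-favoriting an entity returns the existing row with 200 instead of tripping the unique constraint. The same intent could be written with get_or_create; a sketch of that alternative, not the code the commit chose:

favorite, created = UserFavorite.objects.get_or_create(
    workspace=workspace,
    user_id=request.user.id,
    entity_type=request.data.get("entity_type"),
    entity_identifier=request.data.get("entity_identifier"),
)
# created is False when the favorite already existed, mirroring the 200 path above.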
@@ -7,7 +7,6 @@ import jwt
 from django.conf import settings
 from django.core.exceptions import ValidationError
 from django.core.validators import validate_email
-from django.db.models import Count
 from django.utils import timezone

 # Third party modules
@@ -26,7 +25,8 @@ from plane.bgtasks.event_tracking_task import workspace_invite_event
 from plane.bgtasks.workspace_invitation_task import workspace_invitation
 from plane.db.models import User, Workspace, WorkspaceMember, WorkspaceMemberInvite
 from plane.utils.cache import invalidate_cache, invalidate_cache_directly
-
+from plane.utils.host import base_host
+from plane.utils.ip_address import get_client_ip
 from .. import BaseViewSet


@@ -122,7 +122,7 @@ class WorkspaceInvitationsViewset(BaseViewSet):
             workspace_invitations, batch_size=10, ignore_conflicts=True
         )

-        current_site = request.META.get("HTTP_ORIGIN")
+        current_site = base_host(request=request, is_app=True)

         # Send invitations
         for invitation in workspace_invitations:
@@ -213,7 +213,7 @@ class WorkspaceJoinEndpoint(BaseAPIView):
                 user=user.id if user is not None else None,
                 email=email,
                 user_agent=request.META.get("HTTP_USER_AGENT"),
-                ip=request.META.get("REMOTE_ADDR"),
+                ip=get_client_ip(request=request),
                 event_name="MEMBER_ACCEPTED",
                 accepted_from="EMAIL",
             )
@@ -68,10 +68,11 @@ class WorkSpaceMemberViewSet(BaseViewSet):
                 status=status.HTTP_400_BAD_REQUEST,
             )

-        if workspace_member.role > int(request.data.get("role")):
-            _ = ProjectMember.objects.filter(
+        # If a user is moved to a guest role he can't have any other role in projects
+        if "role" in request.data and int(request.data.get("role")) == 5:
+            ProjectMember.objects.filter(
                 workspace__slug=slug, member_id=workspace_member.member_id
-            ).update(role=int(request.data.get("role")))
+            ).update(role=5)

         serializer = WorkSpaceMemberSerializer(
             workspace_member, data=request.data, partial=True
@@ -8,6 +8,7 @@ from plane.app.views.base import BaseAPIView
 from plane.db.models import State
 from plane.app.permissions import WorkspaceEntityPermission
 from plane.utils.cache import cache_response
+from collections import defaultdict


 class WorkspaceStatesEndpoint(BaseAPIView):
@@ -22,5 +23,16 @@ class WorkspaceStatesEndpoint(BaseAPIView):
             project__archived_at__isnull=True,
             is_triage=False,
         )
+
+        grouped_states = defaultdict(list)
+        for state in states:
+            grouped_states[state.group].append(state)
+
+        for group, group_states in grouped_states.items():
+            count = len(group_states)
+
+            for index, state in enumerate(group_states, start=1):
+                state.order = index / count
+
         serializer = StateSerializer(states, many=True).data
         return Response(serializer, status=status.HTTP_200_OK)
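The new grouping pass assigns each state an order normalized within its group, so orders are evenly spaced in (0, 1] regardless of group size. A standalone sketch of the same arithmetic with hypothetical stand-in rows:

from collections import defaultdict

states = [  # hypothetical stand-ins; only `group` matters here
    {"name": "Todo", "group": "unstarted"},
    {"name": "In Progress", "group": "started"},
    {"name": "In Review", "group": "started"},
]

grouped = defaultdict(list)
for state in states:
    grouped[state["group"]].append(state)

for group, group_states in grouped.items():
    count = len(group_states)
    for index, state in enumerate(group_states, start=1):
        state["order"] = index / count  # started -> 0.5, 1.0; unstarted -> 1.0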
@@ -15,8 +15,8 @@ from plane.db.models import Profile, User, WorkspaceMemberInvite
 from plane.license.utils.instance_value import get_configuration_value
 from .error import AuthenticationException, AUTHENTICATION_ERROR_CODES
 from plane.bgtasks.user_activation_email_task import user_activation_email
-from plane.authentication.utils.host import base_host
-
+from plane.utils.host import base_host
+from plane.utils.ip_address import get_client_ip

 class Adapter:
     """Common interface for all auth providers"""
@@ -108,7 +108,7 @@ class Adapter:
         user.last_login_medium = self.provider
         user.last_active = timezone.now()
         user.last_login_time = timezone.now()
-        user.last_login_ip = self.request.META.get("REMOTE_ADDR")
+        user.last_login_ip = get_client_ip(request=self.request)
         user.last_login_uagent = self.request.META.get("HTTP_USER_AGENT")
         user.token_updated_at = timezone.now()
         # If user is not active, send the activation email and set the user as active
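Several hunks in this diff swap request.META.get("REMOTE_ADDR") for plane.utils.ip_address.get_client_ip, whose body is never shown here. The following is only a plausible sketch of what such a helper usually does (prefer the first hop of X-Forwarded-For behind a trusted reverse proxy); the function name is real, the implementation is an assumption:

def get_client_ip(request) -> str:
    # Assumed behaviour, not the actual plane implementation.
    forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
    if forwarded_for:
        return forwarded_for.split(",")[0].strip()
    return str(request.META.get("REMOTE_ADDR", ""))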
@@ -36,10 +36,12 @@ AUTHENTICATION_ERROR_CODES = {
     "OAUTH_NOT_CONFIGURED": 5104,
     "GOOGLE_NOT_CONFIGURED": 5105,
     "GITHUB_NOT_CONFIGURED": 5110,
+    "GITHUB_USER_NOT_IN_ORG": 5122,
     "GITLAB_NOT_CONFIGURED": 5111,
     "GOOGLE_OAUTH_PROVIDER_ERROR": 5115,
     "GITHUB_OAUTH_PROVIDER_ERROR": 5120,
+    "GITLAB_OAUTH_PROVIDER_ERROR": 5121,
     # Reset Password
     "INVALID_PASSWORD_TOKEN": 5125,
     "EXPIRED_PASSWORD_TOKEN": 5130,
@@ -18,11 +18,16 @@ from plane.authentication.adapter.error import (
 class GitHubOAuthProvider(OauthAdapter):
     token_url = "https://github.com/login/oauth/access_token"
     userinfo_url = "https://api.github.com/user"
+    org_membership_url = f"https://api.github.com/orgs"
     provider = "github"
     scope = "read:user user:email"
+    organization_scope = "read:org"

     def __init__(self, request, code=None, state=None, callback=None):
-        GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET = get_configuration_value(
+        GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET, GITHUB_ORGANIZATION_ID = get_configuration_value(
             [
                 {
                     "key": "GITHUB_CLIENT_ID",
@@ -32,6 +37,10 @@ class GitHubOAuthProvider(OauthAdapter):
                     "key": "GITHUB_CLIENT_SECRET",
                     "default": os.environ.get("GITHUB_CLIENT_SECRET"),
                 },
+                {
+                    "key": "GITHUB_ORGANIZATION_ID",
+                    "default": os.environ.get("GITHUB_ORGANIZATION_ID"),
+                },
             ]
         )

@@ -43,6 +52,10 @@ class GitHubOAuthProvider(OauthAdapter):

         client_id = GITHUB_CLIENT_ID
         client_secret = GITHUB_CLIENT_SECRET
+        self.organization_id = GITHUB_ORGANIZATION_ID
+
+        if self.organization_id:
+            self.scope += f" {self.organization_scope}"

         redirect_uri = f"""{"https" if request.is_secure() else "http"}://{request.get_host()}/auth/github/callback/"""
         url_params = {
@@ -113,12 +126,26 @@ class GitHubOAuthProvider(OauthAdapter):
                 error_message="GITHUB_OAUTH_PROVIDER_ERROR",
             )

+    def is_user_in_organization(self, github_username):
+        headers = {"Authorization": f"Bearer {self.token_data.get('access_token')}"}
+        response = requests.get(f"{self.org_membership_url}/{self.organization_id}/memberships/{github_username}", headers=headers)
+        return response.status_code == 200  # 200 means the user is a member
+
     def set_user_data(self):
         user_info_response = self.get_user_response()
         headers = {
             "Authorization": f"Bearer {self.token_data.get('access_token')}",
             "Accept": "application/json",
         }
+
+        if self.organization_id:
+            if not self.is_user_in_organization(user_info_response.get("login")):
+                raise AuthenticationException(
+                    error_code=AUTHENTICATION_ERROR_CODES["GITHUB_USER_NOT_IN_ORG"],
+                    error_message="GITHUB_USER_NOT_IN_ORG",
+                )
+
         email = self.__get_email(headers=headers)
         super().set_user_data(
             {
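The new is_user_in_organization gate leans on GitHub's organization-membership endpoint, which returns 200 when the token (carrying the read:org scope added above) can see an active membership, and 404 otherwise. A minimal standalone sketch of the same check, with placeholder token, organization and username values:

import requests

ACCESS_TOKEN = "gho_example"  # placeholder OAuth token
ORG = "my-org"                # placeholder organization
USERNAME = "octocat"          # placeholder login

resp = requests.get(
    f"https://api.github.com/orgs/{ORG}/memberships/{USERNAME}",
    headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
)
is_member = resp.status_code == 200  # 404 => not a member (or membership not visible)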
@@ -1,32 +1,53 @@
 # Python imports
 from urllib.parse import urlsplit

 # Django imports
 from django.conf import settings
+from django.http import HttpRequest
+
+# Third party imports
+from rest_framework.request import Request
+
+# Module imports
+from plane.utils.ip_address import get_client_ip


-def base_host(request, is_admin=False, is_space=False, is_app=False):
+def base_host(
+    request: Request | HttpRequest,
+    is_admin: bool = False,
+    is_space: bool = False,
+    is_app: bool = False,
+) -> str:
     """Utility function to return host / origin from the request"""
-    # Calculate the base origin from request
-    base_origin = str(
-        request.META.get("HTTP_ORIGIN")
-        or f"{urlsplit(request.META.get('HTTP_REFERER')).scheme}://{urlsplit(request.META.get('HTTP_REFERER')).netloc}"
-        or f"""{"https" if request.is_secure() else "http"}://{request.get_host()}"""
-    )
+    base_origin = settings.WEB_URL or settings.APP_BASE_URL

-    # Admin redirections
+    # Admin redirection
     if is_admin:
-        if settings.ADMIN_BASE_URL:
-            return settings.ADMIN_BASE_URL
-        else:
-            return base_origin + "/god-mode/"
+        admin_base_path = getattr(settings, "ADMIN_BASE_PATH", None)
+        if not isinstance(admin_base_path, str):
+            admin_base_path = "/god-mode/"
+        if not admin_base_path.startswith("/"):
+            admin_base_path = "/" + admin_base_path
+        if not admin_base_path.endswith("/"):
+            admin_base_path += "/"

-    # Space redirections
-    if is_space:
-        if settings.SPACE_BASE_URL:
-            return settings.SPACE_BASE_URL
+        if settings.ADMIN_BASE_URL:
+            return settings.ADMIN_BASE_URL + admin_base_path
         else:
-            return base_origin + "/spaces/"
+            return base_origin + admin_base_path
+
+    # Space redirection
+    if is_space:
+        space_base_path = getattr(settings, "SPACE_BASE_PATH", None)
+        if not isinstance(space_base_path, str):
+            space_base_path = "/spaces/"
+        if not space_base_path.startswith("/"):
+            space_base_path = "/" + space_base_path
+        if not space_base_path.endswith("/"):
+            space_base_path += "/"
+
+        if settings.SPACE_BASE_URL:
+            return settings.SPACE_BASE_URL + space_base_path
+        else:
+            return base_origin + space_base_path

     # App Redirection
     if is_app:
@@ -38,5 +59,5 @@ def base_host(request, is_admin=False, is_space=False, is_app=False):
     return base_origin


-def user_ip(request):
-    return str(request.META.get("REMOTE_ADDR"))
+def user_ip(request: Request | HttpRequest) -> str:
+    return get_client_ip(request=request)
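The rewritten base_host no longer derives the origin from request headers; it trusts settings.WEB_URL / settings.APP_BASE_URL and normalizes the admin/space path fragments to carry exactly one leading and one trailing slash. A distilled, standalone version of that normalization (the defaults mirror the hunk above):

def normalize_base_path(value, default):
    # Fall back when the setting is unset or not a string,
    # then force a leading and a trailing slash.
    if not isinstance(value, str):
        value = default
    if not value.startswith("/"):
        value = "/" + value
    if not value.endswith("/"):
        value += "/"
    return value

assert normalize_base_path(None, "/god-mode/") == "/god-mode/"
assert normalize_base_path("admin", "/god-mode/") == "/admin/"
assert normalize_base_path("/spaces", "/spaces/") == "/spaces/"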
@@ -3,8 +3,8 @@ from django.contrib.auth import login
 from django.conf import settings

 # Module imports
-from plane.authentication.utils.host import base_host
-
+from plane.utils.host import base_host
+from plane.utils.ip_address import get_client_ip

 def user_login(request, user, is_app=False, is_admin=False, is_space=False):
     login(request=request, user=user)
@@ -15,7 +15,7 @@ def user_login(request, user, is_app=False, is_admin=False, is_space=False):

     device_info = {
         "user_agent": request.META.get("HTTP_USER_AGENT", ""),
-        "ip_address": request.META.get("REMOTE_ADDR", ""),
+        "ip_address": get_client_ip(request=request),
         "domain": base_host(
             request=request, is_app=is_app, is_admin=is_admin, is_space=is_space
         ),
@@ -19,7 +19,7 @@ from plane.authentication.adapter.error import (
     AuthenticationException,
     AUTHENTICATION_ERROR_CODES,
 )
-
+from plane.utils.path_validator import validate_next_path

 class SignInAuthEndpoint(View):
     def post(self, request):
@@ -34,7 +34,7 @@ class SignInAuthEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             # Base URL join
             url = urljoin(
                 base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
@@ -58,7 +58,7 @@ class SignInAuthEndpoint(View):
             params = exc.get_error_dict()
             # Next path
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(
                 base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
             )
@@ -76,7 +76,7 @@ class SignInAuthEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(
                 base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
             )
@@ -92,7 +92,7 @@ class SignInAuthEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(
                 base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
             )
@@ -111,7 +111,7 @@ class SignInAuthEndpoint(View):
             user_login(request=request, user=user, is_app=True)
             # Get the redirection path
             if next_path:
-                path = str(next_path)
+                path = str(validate_next_path(next_path))
             else:
                 path = get_redirection_path(user=user)

@@ -121,7 +121,7 @@ class SignInAuthEndpoint(View):
         except AuthenticationException as e:
             params = e.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(
                 base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
             )
@@ -141,7 +141,7 @@ class SignUpAuthEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(
                 base_host(request=request, is_app=True), "?" + urlencode(params)
             )
@@ -161,7 +161,7 @@ class SignUpAuthEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(
                 base_host(request=request, is_app=True), "?" + urlencode(params)
             )
@@ -179,7 +179,7 @@ class SignUpAuthEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(
                 base_host(request=request, is_app=True), "?" + urlencode(params)
             )
@@ -197,7 +197,7 @@ class SignUpAuthEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(
                 base_host(request=request, is_app=True), "?" + urlencode(params)
             )
@@ -216,7 +216,7 @@ class SignUpAuthEndpoint(View):
             user_login(request=request, user=user, is_app=True)
             # Get the redirection path
             if next_path:
-                path = next_path
+                path = str(validate_next_path(next_path))
             else:
                 path = get_redirection_path(user=user)
             # redirect to referer path
@@ -225,7 +225,7 @@ class SignUpAuthEndpoint(View):
         except AuthenticationException as e:
             params = e.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(
                 base_host(request=request, is_app=True), "?" + urlencode(params)
             )
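Every redirect in these endpoints now routes next_path through plane.utils.path_validator.validate_next_path. Only the call sites appear in the diff, so the sketch below is an assumption about the guard's intent (dropping absolute and protocol-relative URLs to close an open-redirect hole), not the actual implementation:

def validate_next_path(next_path, fallback="/"):
    # Assumed behaviour: only same-origin relative paths survive;
    # "https://evil.example" and "//evil.example" both fall back.
    path = str(next_path or "")
    if not path.startswith("/") or path.startswith("//") or "\\" in path:
        return fallback
    return path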
@@ -16,7 +16,7 @@ from plane.authentication.adapter.error import (
     AuthenticationException,
     AUTHENTICATION_ERROR_CODES,
 )
-
+from plane.utils.path_validator import validate_next_path

 class GitHubOauthInitiateEndpoint(View):
     def get(self, request):
@@ -35,7 +35,7 @@ class GitHubOauthInitiateEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(
                 base_host(request=request, is_app=True), "?" + urlencode(params)
             )
@@ -49,7 +49,7 @@ class GitHubOauthInitiateEndpoint(View):
         except AuthenticationException as e:
             params = e.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(
                 base_host(request=request, is_app=True), "?" + urlencode(params)
             )
@@ -70,7 +70,7 @@ class GitHubCallbackEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(base_host, "?" + urlencode(params))
             return HttpResponseRedirect(url)
@@ -81,7 +81,7 @@ class GitHubCallbackEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(base_host, "?" + urlencode(params))
             return HttpResponseRedirect(url)
@@ -94,7 +94,7 @@ class GitHubCallbackEndpoint(View):
             user_login(request=request, user=user, is_app=True)
             # Get the redirection path
             if next_path:
-                path = next_path
+                path = str(validate_next_path(next_path))
             else:
                 path = get_redirection_path(user=user)
             # redirect to referer path
@@ -103,6 +103,6 @@ class GitHubCallbackEndpoint(View):
         except AuthenticationException as e:
             params = e.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(base_host, "?" + urlencode(params))
             return HttpResponseRedirect(url)
@@ -16,7 +16,7 @@ from plane.authentication.adapter.error import (
     AuthenticationException,
     AUTHENTICATION_ERROR_CODES,
 )
-
+from plane.utils.path_validator import validate_next_path

 class GitLabOauthInitiateEndpoint(View):
     def get(self, request):
@@ -24,7 +24,7 @@ class GitLabOauthInitiateEndpoint(View):
         request.session["host"] = base_host(request=request, is_app=True)
         next_path = request.GET.get("next_path")
         if next_path:
-            request.session["next_path"] = str(next_path)
+            request.session["next_path"] = str(validate_next_path(next_path))

         # Check instance configuration
         instance = Instance.objects.first()
@@ -35,7 +35,7 @@ class GitLabOauthInitiateEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(
                 base_host(request=request, is_app=True), "?" + urlencode(params)
             )
@@ -49,7 +49,7 @@ class GitLabOauthInitiateEndpoint(View):
         except AuthenticationException as e:
             params = e.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(
                 base_host(request=request, is_app=True), "?" + urlencode(params)
             )
@@ -81,7 +81,7 @@ class GitLabCallbackEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(base_host, "?" + urlencode(params))
             return HttpResponseRedirect(url)
@@ -94,7 +94,7 @@ class GitLabCallbackEndpoint(View):
             user_login(request=request, user=user, is_app=True)
             # Get the redirection path
             if next_path:
-                path = next_path
+                path = str(validate_next_path(next_path))
             else:
                 path = get_redirection_path(user=user)
             # redirect to referer path
@@ -103,6 +103,6 @@ class GitLabCallbackEndpoint(View):
         except AuthenticationException as e:
             params = e.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(base_host, "?" + urlencode(params))
             return HttpResponseRedirect(url)
@@ -18,7 +18,7 @@ from plane.authentication.adapter.error import (
     AuthenticationException,
     AUTHENTICATION_ERROR_CODES,
 )
-
+from plane.utils.path_validator import validate_next_path

 class GoogleOauthInitiateEndpoint(View):
     def get(self, request):
@@ -36,7 +36,7 @@ class GoogleOauthInitiateEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(
                 base_host(request=request, is_app=True), "?" + urlencode(params)
             )
@@ -51,7 +51,7 @@ class GoogleOauthInitiateEndpoint(View):
         except AuthenticationException as e:
             params = e.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(
                 base_host(request=request, is_app=True), "?" + urlencode(params)
             )
@@ -72,7 +72,7 @@ class GoogleCallbackEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(base_host, "?" + urlencode(params))
             return HttpResponseRedirect(url)
         if not code:
@@ -82,7 +82,7 @@ class GoogleCallbackEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = next_path
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(base_host, "?" + urlencode(params))
             return HttpResponseRedirect(url)
         try:
@@ -95,11 +95,11 @@ class GoogleCallbackEndpoint(View):
             # Get the redirection path
             path = get_redirection_path(user=user)
             # redirect to referer path
-            url = urljoin(base_host, str(next_path) if next_path else path)
+            url = urljoin(base_host, str(validate_next_path(next_path)) if next_path else path)
             return HttpResponseRedirect(url)
         except AuthenticationException as e:
             params = e.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(base_host, "?" + urlencode(params))
             return HttpResponseRedirect(url)
@@ -26,6 +26,7 @@ from plane.authentication.adapter.error import (
     AUTHENTICATION_ERROR_CODES,
 )
 from plane.authentication.rate_limit import AuthenticationThrottle
+from plane.utils.path_validator import validate_next_path


 class MagicGenerateEndpoint(APIView):
@@ -43,14 +44,13 @@ class MagicGenerateEndpoint(APIView):
             )
             return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)

-        origin = request.META.get("HTTP_ORIGIN", "/")
         email = request.data.get("email", "").strip().lower()
         try:
             validate_email(email)
             adapter = MagicCodeProvider(request=request, key=email)
             key, token = adapter.initiate()
             # If the smtp is configured send through here
-            magic_link.delay(email, key, token, origin)
+            magic_link.delay(email, key, token)
             return Response({"key": str(key)}, status=status.HTTP_200_OK)
         except AuthenticationException as e:
             params = e.get_error_dict()
@@ -73,7 +73,7 @@ class MagicSignInEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(
                 base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
             )
@@ -89,7 +89,7 @@ class MagicSignInEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(
                 base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
             )
@@ -122,7 +122,7 @@ class MagicSignInEndpoint(View):
         except AuthenticationException as e:
             params = e.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(
                 base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
             )
@@ -145,7 +145,7 @@ class MagicSignUpEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(
                 base_host(request=request, is_app=True), "?" + urlencode(params)
             )
@@ -159,7 +159,7 @@ class MagicSignUpEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(
                 base_host(request=request, is_app=True), "?" + urlencode(params)
             )
@@ -177,7 +177,7 @@ class MagicSignUpEndpoint(View):
             user_login(request=request, user=user, is_app=True)
             # Get the redirection path
             if next_path:
-                path = str(next_path)
+                path = str(validate_next_path(next_path))
             else:
                 path = get_redirection_path(user=user)
             # redirect to referer path
@@ -187,7 +187,7 @@ class MagicSignUpEndpoint(View):
         except AuthenticationException as e:
             params = e.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = urljoin(
                 base_host(request=request, is_app=True), "?" + urlencode(params)
             )
@@ -80,7 +80,7 @@ class ForgotPasswordEndpoint(APIView):
         if user:
             # Get the reset token for user
             uidb64, token = generate_password_token(user=user)
-            current_site = request.META.get("HTTP_ORIGIN")
+            current_site = base_host(request=request, is_app=True)
             # send the forgot password email
             forgot_password.delay(
                 user.first_name, user.email, uidb64, token, current_site
@@ -44,10 +44,21 @@ class ChangePasswordEndpoint(APIView):
     def post(self, request):
         user = User.objects.get(pk=request.user.id)

-        old_password = request.data.get("old_password", False)
+        # If the user password is not autoset then we need to check the old passwords
+        if not user.is_password_autoset:
+            old_password = request.data.get("old_password", False)
+            if not old_password:
+                exc = AuthenticationException(
+                    error_code=AUTHENTICATION_ERROR_CODES["MISSING_PASSWORD"],
+                    error_message="MISSING_PASSWORD",
+                    payload={"error": "Old password is missing"},
+                )
+                return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)
+
+        # Get the new password
         new_password = request.data.get("new_password", False)

-        if not old_password or not new_password:
+        if not new_password:
             exc = AuthenticationException(
                 error_code=AUTHENTICATION_ERROR_CODES["MISSING_PASSWORD"],
                 error_message="MISSING_PASSWORD",
@@ -55,7 +66,9 @@ class ChangePasswordEndpoint(APIView):
             )
             return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)

-        if not user.check_password(old_password):
+
+        # If the user password is not autoset then we need to check the old passwords
+        if not user.is_password_autoset and not user.check_password(old_password):
             exc = AuthenticationException(
                 error_code=AUTHENTICATION_ERROR_CODES["INCORRECT_OLD_PASSWORD"],
                 error_message="INCORRECT_OLD_PASSWORD",
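The reordered checks mean an old password is only demanded, and verified, when the account actually has one; users whose password was autoset (OAuth or magic-link signups) may set a password directly. A condensed sketch of the resulting decision table, with a stand-in user object:

def old_password_error(user, old_password):
    # Autoset passwords skip the old-password check entirely.
    if user.is_password_autoset:
        return None
    if not old_password:
        return "MISSING_PASSWORD"
    if not user.check_password(old_password):
        return "INCORRECT_OLD_PASSWORD"
    return None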
@@ -17,6 +17,7 @@ from plane.authentication.adapter.error import (
     AUTHENTICATION_ERROR_CODES,
     AuthenticationException,
 )
+from plane.utils.path_validator import validate_next_path


 class SignInAuthSpaceEndpoint(View):
@@ -32,7 +33,7 @@ class SignInAuthSpaceEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -51,7 +52,7 @@ class SignInAuthSpaceEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -67,7 +68,7 @@ class SignInAuthSpaceEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -82,7 +83,7 @@ class SignInAuthSpaceEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -99,7 +100,7 @@ class SignInAuthSpaceEndpoint(View):
         except AuthenticationException as e:
             params = e.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -117,7 +118,7 @@ class SignUpAuthSpaceEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -135,7 +136,7 @@ class SignUpAuthSpaceEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
         # Validate the email
@@ -151,7 +152,7 @@ class SignUpAuthSpaceEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -166,7 +167,7 @@ class SignUpAuthSpaceEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -183,6 +184,6 @@ class SignUpAuthSpaceEndpoint(View):
         except AuthenticationException as e:
             params = e.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -15,6 +15,7 @@ from plane.authentication.adapter.error import (
     AUTHENTICATION_ERROR_CODES,
     AuthenticationException,
 )
+from plane.utils.path_validator import validate_next_path


 class GitHubOauthInitiateSpaceEndpoint(View):
@@ -34,7 +35,7 @@ class GitHubOauthInitiateSpaceEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -66,7 +67,7 @@ class GitHubCallbackSpaceEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -77,7 +78,7 @@ class GitHubCallbackSpaceEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -93,6 +94,6 @@ class GitHubCallbackSpaceEndpoint(View):
         except AuthenticationException as e:
             params = e.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -15,6 +15,7 @@ from plane.authentication.adapter.error import (
     AUTHENTICATION_ERROR_CODES,
     AuthenticationException,
 )
+from plane.utils.path_validator import validate_next_path


 class GitLabOauthInitiateSpaceEndpoint(View):
@@ -34,7 +35,7 @@ class GitLabOauthInitiateSpaceEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -66,7 +67,7 @@ class GitLabCallbackSpaceEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -77,7 +78,7 @@ class GitLabCallbackSpaceEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -93,6 +94,6 @@ class GitLabCallbackSpaceEndpoint(View):
         except AuthenticationException as e:
             params = e.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -15,6 +15,7 @@ from plane.authentication.adapter.error import (
     AuthenticationException,
     AUTHENTICATION_ERROR_CODES,
 )
+from plane.utils.path_validator import validate_next_path


 class GoogleOauthInitiateSpaceEndpoint(View):
@@ -33,7 +34,7 @@ class GoogleOauthInitiateSpaceEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -46,7 +47,7 @@ class GoogleOauthInitiateSpaceEndpoint(View):
         except AuthenticationException as e:
             params = e.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -65,7 +66,7 @@ class GoogleCallbackSpaceEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
         if not code:
@@ -75,7 +76,7 @@ class GoogleCallbackSpaceEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = next_path
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
         try:
@@ -89,6 +90,6 @@ class GoogleCallbackSpaceEndpoint(View):
         except AuthenticationException as e:
             params = e.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -23,7 +23,7 @@ from plane.authentication.adapter.error import (
     AuthenticationException,
     AUTHENTICATION_ERROR_CODES,
 )
-
+from plane.utils.path_validator import validate_next_path

 class MagicGenerateSpaceEndpoint(APIView):
     permission_classes = [AllowAny]
@@ -38,14 +38,14 @@ class MagicGenerateSpaceEndpoint(APIView):
             )
             return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)

         origin = base_host(request=request, is_space=True)

         email = request.data.get("email", "").strip().lower()
         try:
             validate_email(email)
             adapter = MagicCodeProvider(request=request, key=email)
             key, token = adapter.initiate()
             # If the smtp is configured send through here
-            magic_link.delay(email, key, token, origin)
+            magic_link.delay(email, key, token)
             return Response({"key": str(key)}, status=status.HTTP_200_OK)
         except AuthenticationException as e:
             return Response(e.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)
@@ -67,7 +67,7 @@ class MagicSignInSpaceEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -80,7 +80,7 @@ class MagicSignInSpaceEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -121,7 +121,7 @@ class MagicSignUpSpaceEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
         # Existing User
@@ -134,7 +134,7 @@ class MagicSignUpSpaceEndpoint(View):
             )
             params = exc.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -152,6 +152,6 @@ class MagicSignUpSpaceEndpoint(View):
         except AuthenticationException as e:
             params = e.get_error_dict()
             if next_path:
-                params["next_path"] = str(next_path)
+                params["next_path"] = str(validate_next_path(next_path))
             url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
             return HttpResponseRedirect(url)
@@ -90,7 +90,7 @@ class ForgotPasswordSpaceEndpoint(APIView):
         if user:
             # Get the reset token for user
             uidb64, token = generate_password_token(user=user)
-            current_site = request.META.get("HTTP_ORIGIN")
+            current_site = base_host(request=request, is_space=True)
             # send the forgot password email
             forgot_password.delay(
                 user.first_name, user.email, uidb64, token, current_site
@@ -7,6 +7,7 @@ from django.utils import timezone
 # Module imports
 from plane.authentication.utils.host import base_host, user_ip
 from plane.db.models import User
+from plane.utils.path_validator import validate_next_path


 class SignOutAuthSpaceEndpoint(View):
@@ -21,8 +22,8 @@ class SignOutAuthSpaceEndpoint(View):
             user.save()
             # Log the user out
             logout(request)
-            url = f"{base_host(request=request, is_space=True)}{next_path}"
+            url = f"{base_host(request=request, is_space=True)}{str(validate_next_path(next_path)) if next_path else ''}"
             return HttpResponseRedirect(url)
         except Exception:
-            url = f"{base_host(request=request, is_space=True)}{next_path}"
+            url = f"{base_host(request=request, is_space=True)}{str(validate_next_path(next_path)) if next_path else ''}"
             return HttpResponseRedirect(url)
@@ -459,8 +459,37 @@ def analytic_export_task(email, data, slug):

         csv_buffer = generate_csv_from_rows(rows)
         send_export_email(email, slug, csv_buffer, rows)
-        logging.getLogger("plane").info("Email sent succesfully.")
+        logging.getLogger("plane.worker").info("Email sent successfully.")
         return
     except Exception as e:
         log_exception(e)
         return
+
+
+@shared_task
+def export_analytics_to_csv_email(data, headers, keys, email, slug):
+    try:
+        """
+        Prepares a CSV from data and sends it as an email attachment.
+
+        Parameters:
+        - data: List of dictionaries (e.g. from .values())
+        - headers: List of CSV column headers
+        - keys: Keys to extract from each data item (dict)
+        - email: Email address to send to
+        - slug: Used for the filename
+        """
+        # Prepare rows: header + data rows
+        rows = [headers]
+        for item in data:
+            row = [item.get(key, "") for key in keys]
+            rows.append(row)
+
+        # Generate CSV buffer
+        csv_buffer = generate_csv_from_rows(rows)
+
+        # Send email with CSV attachment
+        send_export_email(email=email, slug=slug, csv_buffer=csv_buffer, rows=rows)
+    except Exception as e:
+        log_exception(e)
+        return
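A hedged usage sketch for the new task, assuming the caller already flattened a queryset with .values(); the field names here are illustrative, not taken from the diff:

data = [{"name": "Bug", "count": 3}, {"name": "Story", "count": 7}]
export_analytics_to_csv_email.delay(
    data=data,
    headers=["Issue Type", "Count"],
    keys=["name", "count"],
    email="user@example.com",
    slug="my-workspace",
)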
@@ -12,6 +12,7 @@ from plane.db.models import FileAsset, Page, Issue
 from plane.utils.exception_logger import log_exception
 from plane.settings.storage import S3Storage
 from celery import shared_task
+from plane.utils.url import normalize_url_path


 def get_entity_id_field(entity_type, entity_id):
@@ -67,11 +68,14 @@ def sync_with_external_service(entity_name, description_html):
             "description_html": description_html,
             "variant": "rich" if entity_name == "PAGE" else "document",
         }
-        response = requests.post(
-            f"{settings.LIVE_BASE_URL}/convert-document/",
-            json=data,
-            headers=None,
-        )
+
+        live_url = settings.LIVE_URL
+        if not live_url:
+            return {}
+
+        url = normalize_url_path(f"{live_url}/convert-document/")
+
+        response = requests.post(url, json=data, headers=None)
         if response.status_code == 200:
             return response.json()
     except requests.RequestException as e:
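normalize_url_path is only referenced here, not defined, so this is an assumed sketch of its purpose: collapsing accidental double slashes in the path (e.g. when LIVE_URL ends with "/") while leaving the scheme separator intact.

import re

def normalize_url_path(url: str) -> str:
    # Assumed behaviour: "http://host//a//b" -> "http://host/a/b".
    scheme, sep, rest = url.partition("://")
    if sep:
        return scheme + sep + re.sub(r"/{2,}", "/", rest)
    return re.sub(r"/{2,}", "/", url)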
@@ -33,6 +33,7 @@ from plane.db.models import (
     Intake,
     IntakeIssue,
 )
+from plane.db.models.intake import SourceType


 def create_project(workspace, user_id):
@@ -388,7 +389,7 @@ def create_intake_issues(workspace, project, user_id, intake_issue_count):
                 if status == 0
                 else None
             ),
-            source="in-app",
+            source=SourceType.IN_APP,
            workspace=workspace,
            project=project,
        )
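Replacing the raw "in-app" literal with SourceType.IN_APP points at an enum on the intake model whose definition is not part of this diff. A typical Django TextChoices shape would look like the following; only the IN_APP member and its "in-app" value are evidenced above, the rest is assumption:

from django.db import models

class SourceType(models.TextChoices):
    IN_APP = "in-app", "In app"  # evidenced by the hunk
    EMAIL = "email", "Email"     # hypothetical additional member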
@@ -309,7 +309,7 @@ def send_email_notification(
                 )
                 msg.attach_alternative(html_content, "text/html")
                 msg.send()
-                logging.getLogger("plane").info("Email Sent Successfully")
+                logging.getLogger("plane.worker").info("Email Sent Successfully")

                 # Update the logs
                 EmailNotificationLog.objects.filter(
@@ -325,7 +325,7 @@ def send_email_notification(
             release_lock(lock_id=lock_id)
             return
         else:
-            logging.getLogger("plane").info("Duplicate email received skipping")
+            logging.getLogger("plane.worker").info("Duplicate email received skipping")
             return
     except (Issue.DoesNotExist, User.DoesNotExist):
         release_lock(lock_id=lock_id)
@@ -3,34 +3,48 @@ import csv
|
||||
import io
|
||||
import json
|
||||
import zipfile
|
||||
|
||||
from typing import List
|
||||
import boto3
|
||||
from botocore.client import Config
|
||||
|
||||
from uuid import UUID
|
||||
from datetime import datetime, date
|
||||
# Third party imports
|
||||
from celery import shared_task
|
||||
|
||||
|
||||
# Django imports
|
||||
from django.conf import settings
|
||||
from django.utils import timezone
|
||||
from openpyxl import Workbook
|
||||
from django.db.models import F, Prefetch
|
||||
|
||||
from collections import defaultdict
|
||||
|
||||
# Module imports
|
||||
from plane.db.models import ExporterHistory, Issue
|
||||
from plane.db.models import ExporterHistory, Issue, FileAsset, Label, User, IssueComment
|
||||
from plane.utils.exception_logger import log_exception
|
||||
|
||||
|
||||
def dateTimeConverter(time):
|
||||
def dateTimeConverter(time: datetime) -> str | None:
|
||||
"""
|
||||
Convert a datetime object to a formatted string.
|
||||
"""
|
||||
if time:
|
||||
return time.strftime("%a, %d %b %Y %I:%M:%S %Z%z")
|
||||
|
||||
|
||||
def dateConverter(time):
|
||||
def dateConverter(time: date) -> str | None:
|
||||
"""
|
||||
Convert a date object to a formatted string.
|
||||
"""
|
||||
if time:
|
||||
return time.strftime("%a, %d %b %Y")
|
||||
|
||||
|
||||
def create_csv_file(data):
|
||||
def create_csv_file(data: List[List[str]]) -> str:
|
||||
"""
|
||||
Create a CSV file from the provided data.
|
||||
"""
|
||||
csv_buffer = io.StringIO()
|
||||
csv_writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)
|
||||
|
||||
@@ -41,11 +55,17 @@ def create_csv_file(data):
|
||||
return csv_buffer.getvalue()
|
||||
|
||||
|
||||
def create_json_file(data):
|
||||
def create_json_file(data: List[dict]) -> str:
|
||||
"""
|
||||
Create a JSON file from the provided data.
|
||||
"""
|
||||
return json.dumps(data)
|
||||
|
||||
|
||||
def create_xlsx_file(data):
|
||||
def create_xlsx_file(data: List[List[str]]) -> bytes:
|
||||
"""
|
||||
Create an XLSX file from the provided data.
|
||||
"""
|
||||
workbook = Workbook()
|
||||
sheet = workbook.active
|
||||
|
||||
@@ -58,7 +78,10 @@ def create_xlsx_file(data):
|
||||
return xlsx_buffer.getvalue()
|
||||
|
||||
|
||||
def create_zip_file(files):
|
||||
def create_zip_file(files: List[tuple[str, str | bytes]]) -> io.BytesIO:
|
||||
"""
|
||||
Create a ZIP file from the provided files.
|
||||
"""
|
||||
zip_buffer = io.BytesIO()
|
||||
with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zipf:
|
||||
for filename, file_content in files:
|
||||
@@ -67,8 +90,11 @@ def create_zip_file(files):
|
||||
zip_buffer.seek(0)
|
||||
return zip_buffer
|
||||
|
||||
|
||||
def upload_to_s3(zip_file, workspace_id, token_id, slug):
|
||||
# TODO: Change the upload_to_s3 function to use the new storage method with entry in file asset table
|
||||
def upload_to_s3(zip_file: io.BytesIO, workspace_id: UUID, token_id: str, slug: str) -> None:
|
||||
"""
|
||||
Upload a ZIP file to S3 and generate a presigned URL.
|
||||
"""
|
||||
file_name = (
|
||||
f"{workspace_id}/export-{slug}-{token_id[:6]}-{str(timezone.now().date())}.zip"
|
||||
)
|
||||
@@ -150,75 +176,85 @@ def upload_to_s3(zip_file, workspace_id, token_id, slug):
|
||||
exporter_instance.save(update_fields=["status", "url", "key"])
|
||||
|
||||
|
||||
def generate_table_row(issue):
|
||||
def generate_table_row(issue: dict) -> List[str]:
|
||||
"""
|
||||
Generate a table row from an issue dictionary.
|
||||
"""
|
||||
return [
|
||||
f"""{issue["project__identifier"]}-{issue["sequence_id"]}""",
|
||||
issue["project__name"],
|
||||
f"""{issue["project_identifier"]}-{issue["sequence_id"]}""",
|
||||
issue["project_name"],
|
||||
issue["name"],
|
||||
issue["description_stripped"],
|
||||
issue["state__name"],
|
||||
issue["description"],
|
||||
issue["state_name"],
|
||||
dateConverter(issue["start_date"]),
|
||||
dateConverter(issue["target_date"]),
|
||||
issue["priority"],
|
||||
(
|
||||
f"{issue['created_by__first_name']} {issue['created_by__last_name']}"
|
||||
if issue["created_by__first_name"] and issue["created_by__last_name"]
|
||||
else ""
|
||||
),
|
||||
(
|
||||
f"{issue['assignees__first_name']} {issue['assignees__last_name']}"
|
||||
if issue["assignees__first_name"] and issue["assignees__last_name"]
|
||||
else ""
|
||||
),
|
||||
issue["labels__name"] if issue["labels__name"] else "",
|
||||
issue["issue_cycle__cycle__name"],
|
||||
dateConverter(issue["issue_cycle__cycle__start_date"]),
|
||||
dateConverter(issue["issue_cycle__cycle__end_date"]),
|
||||
issue["issue_module__module__name"],
|
||||
dateConverter(issue["issue_module__module__start_date"]),
|
||||
dateConverter(issue["issue_module__module__target_date"]),
|
||||
issue["created_by"],
|
||||
", ".join(issue["labels"]) if issue["labels"] else "",
|
||||
issue["cycle_name"],
|
||||
issue["cycle_start_date"],
|
||||
issue["cycle_end_date"],
|
||||
", ".join(issue.get("module_name", "")) if issue.get("module_name") else "",
|
||||
dateTimeConverter(issue["created_at"]),
|
||||
dateTimeConverter(issue["updated_at"]),
|
||||
dateTimeConverter(issue["completed_at"]),
|
||||
dateTimeConverter(issue["archived_at"]),
|
||||
(
|
||||
", ".join(
|
||||
[
|
||||
f"{comment['comment']} ({comment['created_at']} by {comment['created_by']})"
|
||||
for comment in issue["comments"]
|
||||
]
|
||||
)
|
||||
if issue["comments"]
|
||||
else ""
|
||||
),
|
||||
issue["estimate"] if issue["estimate"] else "",
|
||||
", ".join(issue["link"]) if issue["link"] else "",
|
||||
", ".join(issue["assignees"]) if issue["assignees"] else "",
|
||||
issue["subscribers_count"] if issue["subscribers_count"] else "",
|
||||
issue["attachment_count"] if issue["attachment_count"] else "",
|
||||
", ".join(issue["attachment_links"]) if issue["attachment_links"] else "",
|
||||
]
|
||||
|
||||
|
||||
def generate_json_row(issue):
|
||||
def generate_json_row(issue: dict) -> dict:
|
||||
"""
|
||||
Generate a JSON row from an issue dictionary.
|
||||
"""
|
||||
return {
|
||||
"ID": f"""{issue["project__identifier"]}-{issue["sequence_id"]}""",
|
||||
"Project": issue["project__name"],
|
||||
"ID": f"""{issue["project_identifier"]}-{issue["sequence_id"]}""",
|
||||
"Project": issue["project_name"],
|
||||
"Name": issue["name"],
|
||||
"Description": issue["description_stripped"],
|
||||
"State": issue["state__name"],
|
||||
"Description": issue["description"],
|
||||
"State": issue["state_name"],
|
||||
"Start Date": dateConverter(issue["start_date"]),
|
||||
"Target Date": dateConverter(issue["target_date"]),
|
||||
"Priority": issue["priority"],
|
||||
"Created By": (
|
||||
f"{issue['created_by__first_name']} {issue['created_by__last_name']}"
|
||||
if issue["created_by__first_name"] and issue["created_by__last_name"]
|
||||
else ""
|
||||
),
|
||||
"Assignee": (
|
||||
f"{issue['assignees__first_name']} {issue['assignees__last_name']}"
|
||||
if issue["assignees__first_name"] and issue["assignees__last_name"]
|
||||
else ""
|
||||
),
|
||||
"Labels": issue["labels__name"] if issue["labels__name"] else "",
|
||||
"Cycle Name": issue["issue_cycle__cycle__name"],
|
||||
"Cycle Start Date": dateConverter(issue["issue_cycle__cycle__start_date"]),
|
||||
"Cycle End Date": dateConverter(issue["issue_cycle__cycle__end_date"]),
|
||||
"Module Name": issue["issue_module__module__name"],
|
||||
"Module Start Date": dateConverter(issue["issue_module__module__start_date"]),
|
||||
"Module Target Date": dateConverter(issue["issue_module__module__target_date"]),
|
||||
"Created By": (f"{issue['created_by']}" if issue["created_by"] else ""),
|
||||
"Assignee": issue["assignees"],
|
||||
"Labels": issue["labels"],
|
||||
"Cycle Name": issue["cycle_name"],
|
||||
"Cycle Start Date": issue["cycle_start_date"],
|
||||
"Cycle End Date": issue["cycle_end_date"],
|
||||
"Module Name": issue["module_name"],
|
||||
"Created At": dateTimeConverter(issue["created_at"]),
|
||||
"Updated At": dateTimeConverter(issue["updated_at"]),
|
||||
"Completed At": dateTimeConverter(issue["completed_at"]),
|
||||
"Archived At": dateTimeConverter(issue["archived_at"]),
|
||||
"Comments": issue["comments"],
|
||||
"Estimate": issue["estimate"],
|
||||
"Link": issue["link"],
|
||||
"Subscribers Count": issue["subscribers_count"],
|
||||
"Attachment Count": issue["attachment_count"],
|
||||
"Attachment Links": issue["attachment_links"],
|
||||
}
|
||||
|
||||
|
||||
def update_json_row(rows, row):
|
||||
def update_json_row(rows: List[dict], row: dict) -> None:
|
||||
"""
|
||||
Update the json row with the new assignee and label.
|
||||
"""
|
||||
matched_index = next(
|
||||
(
|
||||
index
|
||||
@@ -247,7 +283,10 @@ def update_json_row(rows, row):
|
||||
rows.append(row)
|
||||
|
||||
|
||||
def update_table_row(rows, row):
|
||||
def update_table_row(rows: List[List[str]], row: List[str]) -> None:
|
||||
"""
|
||||
Update the table row with the new assignee and label.
|
||||
"""
|
||||
matched_index = next(
|
||||
(index for index, existing_row in enumerate(rows) if existing_row[0] == row[0]),
|
||||
None,
|
||||
@@ -269,7 +308,7 @@ def update_table_row(rows, row):
|
||||
rows.append(row)


-def generate_csv(header, project_id, issues, files):
+def generate_csv(header: List[str], project_id: str, issues: List[dict], files: List[tuple[str, str | bytes]]) -> None:
     """
     Generate CSV export for all the passed issues.
     """

@@ -281,7 +320,10 @@ def generate_csv(header, project_id, issues, files):
     files.append((f"{project_id}.csv", csv_file))


-def generate_json(header, project_id, issues, files):
+def generate_json(header: List[str], project_id: str, issues: List[dict], files: List[tuple[str, str | bytes]]) -> None:
     """
     Generate JSON export for all the passed issues.
     """
     rows = []
     for issue in issues:
         row = generate_json_row(issue)

@@ -290,68 +332,157 @@ def generate_json(header, project_id, issues, files):
     files.append((f"{project_id}.json", json_file))


-def generate_xlsx(header, project_id, issues, files):
+def generate_xlsx(header: List[str], project_id: str, issues: List[dict], files: List[tuple[str, str | bytes]]) -> None:
     """
     Generate XLSX export for all the passed issues.
     """
     rows = [header]
     for issue in issues:
         row = generate_table_row(issue)

         update_table_row(rows, row)
     xlsx_file = create_xlsx_file(rows)
     files.append((f"{project_id}.xlsx", xlsx_file))
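
create_xlsx_file and the csv_file/json_file values consumed above come from helpers not shown in this diff. A minimal sketch of a CSV serializer with the same contract — producing the content half of the (filename, content) tuples collected in files — under an assumed name:

    import csv
    import io

    def create_csv_file(rows):
        # Serialize the rows into in-memory CSV text
        buffer = io.StringIO()
        writer = csv.writer(buffer, quoting=csv.QUOTE_ALL)
        for row in rows:
            writer.writerow(row)
        return buffer.getvalue()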


+def get_created_by(obj: Issue | IssueComment) -> str:
+    """
+    Get the created by user for the given object.
+    """
+    if obj.created_by:
+        return f"{obj.created_by.first_name} {obj.created_by.last_name}"
+    return ""
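
Usage note: the helper accepts any instance with a nullable created_by foreign key, which is why the hint admits both Issue and IssueComment.

    # e.g., assuming an Issue row and an IssueComment row loaded elsewhere:
    #   get_created_by(issue)    -> "Jane Doe" when both names are set
    #   get_created_by(comment)  -> "" when created_by is NULL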


 @shared_task
-def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, slug):
+def issue_export_task(provider: str, workspace_id: UUID, project_ids: List[str], token_id: str, multiple: bool, slug: str):
     """
     Export issues from the workspace.
     provider (str): The provider to export the issues to csv | json | xlsx.
     token_id (str): The export object token id.
     multiple (bool): Whether to export the issues to multiple files per project.
     """
     try:
         exporter_instance = ExporterHistory.objects.get(token=token_id)
         exporter_instance.status = "processing"
         exporter_instance.save(update_fields=["status"])

         # Base query to get the issues
         workspace_issues = (
-            (
-                Issue.objects.filter(
-                    workspace__id=workspace_id,
-                    project_id__in=project_ids,
-                    project__project_projectmember__member=exporter_instance.initiated_by_id,
-                    project__project_projectmember__is_active=True,
-                    project__archived_at__isnull=True,
-                )
-                .select_related("project", "workspace", "state", "parent", "created_by")
-                .prefetch_related(
-                    "assignees", "labels", "issue_cycle__cycle", "issue_module__module"
-                )
-                .values(
-                    "id",
-                    "project__identifier",
-                    "project__name",
-                    "project__id",
-                    "sequence_id",
-                    "name",
-                    "description_stripped",
-                    "priority",
-                    "start_date",
-                    "target_date",
-                    "state__name",
-                    "created_at",
-                    "updated_at",
-                    "completed_at",
-                    "archived_at",
-                    "issue_cycle__cycle__name",
-                    "issue_cycle__cycle__start_date",
-                    "issue_cycle__cycle__end_date",
-                    "issue_module__module__name",
-                    "issue_module__module__start_date",
-                    "issue_module__module__target_date",
-                    "created_by__first_name",
-                    "created_by__last_name",
-                    "assignees__first_name",
-                    "assignees__last_name",
-                    "labels__name",
-                )
+            Issue.objects.filter(
+                workspace__id=workspace_id,
+                project_id__in=project_ids,
+                project__project_projectmember__member=exporter_instance.initiated_by_id,
+                project__project_projectmember__is_active=True,
+                project__archived_at__isnull=True,
+            )
+            .select_related(
+                "project",
+                "workspace",
+                "state",
+                "parent",
+                "created_by",
+                "estimate_point",
+            )
+            .prefetch_related(
+                "labels",
+                "issue_cycle__cycle",
+                "issue_module__module",
+                "issue_comments",
+                "assignees",
+                Prefetch(
+                    "assignees",
+                    queryset=User.objects.only("first_name", "last_name").distinct(),
+                    to_attr="assignee_details",
+                ),
+                Prefetch(
+                    "labels",
+                    queryset=Label.objects.only("name").distinct(),
+                    to_attr="label_details",
+                ),
+                "issue_subscribers",
+                "issue_link",
+            )
             .order_by("project__identifier", "sequence_id")
+            .distinct()
         )
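
The two Prefetch(..., to_attr=...) clauses above materialize each relation as a plain list attribute rather than a manager, so the export loop later in this hunk can read them without issuing extra queries:

    # Access pattern implied by the queryset (no additional SQL per issue):
    for issue in workspace_issues:
        assignee_names = [f"{a.first_name} {a.last_name}" for a in issue.assignee_details]
        label_names = [label.name for label in issue.label_details]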
-        # CSV header
+
+        # Get the attachments for the issues
+        file_assets = FileAsset.objects.filter(
+            issue_id__in=workspace_issues.values_list("id", flat=True),
+            entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT
+        ).annotate(work_item_id=F("issue_id"), asset_id=F("id"))
+
+        # Create a dictionary to store the attachments for the issues
+        attachment_dict = defaultdict(list)
+        for asset in file_assets:
+            attachment_dict[asset.work_item_id].append(asset.asset_id)
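
defaultdict(list) spares the grouping loop an explicit key-existence check; the plain-dict equivalent, for comparison:

    # Equivalent without defaultdict:
    attachment_dict = {}
    for asset in file_assets:
        attachment_dict.setdefault(asset.work_item_id, []).append(asset.asset_id)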
+
+        # Create a list to store the issues data
+        issues_data = []
+
+        # Iterate over the issues
+        for issue in workspace_issues:
+            attachments = attachment_dict.get(issue.id, [])
+
+            issue_data = {
+                "id": issue.id,
+                "project_identifier": issue.project.identifier,
+                "project_name": issue.project.name,
+                "project_id": issue.project.id,
+                "sequence_id": issue.sequence_id,
+                "name": issue.name,
+                "description": issue.description_stripped,
+                "priority": issue.priority,
+                "start_date": issue.start_date,
+                "target_date": issue.target_date,
+                "state_name": issue.state.name if issue.state else None,
+                "created_at": issue.created_at,
+                "updated_at": issue.updated_at,
+                "completed_at": issue.completed_at,
+                "archived_at": issue.archived_at,
+                "module_name": [
+                    module.module.name for module in issue.issue_module.all()
+                ],
+                "created_by": get_created_by(issue),
+                "labels": [label.name for label in issue.label_details],
+                "comments": [
+                    {
+                        "comment": comment.comment_stripped,
+                        "created_at": dateConverter(comment.created_at),
+                        "created_by": get_created_by(comment),
+                    }
+                    for comment in issue.issue_comments.all()
+                ],
+                "estimate": issue.estimate_point.estimate.name
+                if issue.estimate_point and issue.estimate_point.estimate
+                else "",
+                "link": [link.url for link in issue.issue_link.all()],
+                "assignees": [
+                    f"{assignee.first_name} {assignee.last_name}"
+                    for assignee in issue.assignee_details
+                ],
+                "subscribers_count": issue.issue_subscribers.count(),
+                "attachment_count": len(attachments),
+                "attachment_links": [
+                    f"/api/assets/v2/workspaces/{issue.workspace.slug}/projects/{issue.project_id}/issues/{issue.id}/attachments/{asset}/"
+                    for asset in attachments
+                ],
+            }
+
+            # Get Cycles data for the issue
+            cycle = issue.issue_cycle.last()
+            if cycle:
+                # Update cycle data
+                issue_data["cycle_name"] = cycle.cycle.name
+                issue_data["cycle_start_date"] = dateConverter(cycle.cycle.start_date)
+                issue_data["cycle_end_date"] = dateConverter(cycle.cycle.end_date)
+            else:
+                issue_data["cycle_name"] = ""
+                issue_data["cycle_start_date"] = ""
+                issue_data["cycle_end_date"] = ""
+
+            issues_data.append(issue_data)
+
+        # CSV header
         header = [
             "ID",
             "Project",

@@ -362,20 +493,25 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, s
             "Target Date",
             "Priority",
             "Created By",
             "Assignee",
             "Labels",
             "Cycle Name",
             "Cycle Start Date",
             "Cycle End Date",
             "Module Name",
             "Module Start Date",
             "Module Target Date",
             "Created At",
             "Updated At",
             "Completed At",
             "Archived At",
+            "Comments",
+            "Estimate",
+            "Link",
+            "Assignees",
+            "Subscribers Count",
+            "Attachment Count",
+            "Attachment Links",
         ]
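
The header order has to stay aligned with the column order emitted by generate_table_row on the CSV/XLSX paths; a hypothetical sanity check:

    # Hypothetical: every generated row should match the header width
    if issues_data:
        assert len(generate_table_row(issues_data[0])) == len(header)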

         # Map the provider to the function
         EXPORTER_MAPPER = {
             "csv": generate_csv,
             "json": generate_json,

@@ -384,8 +520,13 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, s

         files = []
         if multiple:
+            project_dict = defaultdict(list)
+            for issue in issues_data:
+                project_dict[str(issue["project_id"])].append(issue)
+
             for project_id in project_ids:
-                issues = workspace_issues.filter(project__id=project_id)
+                issues = project_dict.get(str(project_id), [])

                 exporter = EXPORTER_MAPPER.get(provider)
                 if exporter is not None:
                     exporter(header, project_id, issues, files)

@@ -393,7 +534,7 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, s
         else:
             exporter = EXPORTER_MAPPER.get(provider)
             if exporter is not None:
-                exporter(header, workspace_id, workspace_issues, files)
+                exporter(header, workspace_id, issues_data, files)

         zip_buffer = create_zip_file(files)
         upload_to_s3(zip_buffer, workspace_id, token_id, slug)
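
create_zip_file and upload_to_s3 are defined outside this diff. A minimal sketch of the zip step, assuming files is the list of (filename, content) pairs built by the generators above:

    import io
    import zipfile

    def create_zip_file(files):
        # Pack (filename, content) pairs into an in-memory zip archive
        zip_buffer = io.BytesIO()
        with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zf:
            for filename, content in files:
                zf.writestr(filename, content)
        zip_buffer.seek(0)
        return zip_buffer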

@@ -63,7 +63,7 @@ def forgot_password(first_name, email, uidb64, token, current_site):
         )
         msg.attach_alternative(html_content, "text/html")
         msg.send()
-        logging.getLogger("plane").info("Email sent successfully")
+        logging.getLogger("plane.worker").info("Email sent successfully")
         return
     except Exception as e:
         log_exception(e)

@@ -32,7 +32,7 @@ from plane.settings.redis import redis_instance
 from plane.utils.exception_logger import log_exception
 from plane.bgtasks.webhook_task import webhook_activity
 from plane.utils.issue_relation_mapper import get_inverse_relation
-from plane.utils.valid_uuid import is_valid_uuid
+from plane.utils.uuid import is_valid_uuid
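
The is_valid_uuid helper itself is not shown in this diff; a minimal sketch of what such a guard typically looks like — an assumption, not the module's verbatim code:

    import uuid

    def is_valid_uuid(value):
        # True only if value parses as a UUID
        try:
            uuid.UUID(str(value))
            return True
        except (ValueError, TypeError):
            return False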


 # Track Changes in name
@@ -307,6 +307,10 @@ def track_labels(

     # Set of newly added labels
     for added_label in added_labels:
+        # validate uuids
+        if not is_valid_uuid(added_label):
+            continue
+
         label = Label.objects.get(pk=added_label)
         issue_activities.append(
             IssueActivity(

@@ -327,6 +331,10 @@

     # Set of dropped labels
     for dropped_label in dropped_labels:
+        # validate uuids
+        if not is_valid_uuid(dropped_label):
+            continue
+
         label = Label.objects.get(pk=dropped_label)
         issue_activities.append(
             IssueActivity(

@@ -373,6 +381,10 @@ def track_assignees(

     bulk_subscribers = []
     for added_asignee in added_assignees:
+        # validate uuids
+        if not is_valid_uuid(added_asignee):
+            continue
+
         assignee = User.objects.get(pk=added_asignee)
         issue_activities.append(
             IssueActivity(

@@ -406,6 +418,10 @@
         )

     for dropped_assignee in dropped_assginees:
+        # validate uuids
+        if not is_valid_uuid(dropped_assignee):
+            continue
+
         assignee = User.objects.get(pk=dropped_assignee)
         issue_activities.append(
             IssueActivity(

@@ -466,7 +482,7 @@ def track_estimate_points(
             ),
             old_value=old_estimate.value if old_estimate else None,
             new_value=new_estimate.value if new_estimate else None,
-            field="estimate_point",
+            field="estimate_" + new_estimate.estimate.type,
             project_id=project_id,
             workspace_id=workspace_id,
             comment="updated the estimate point to ",
@@ -1634,40 +1650,6 @@ def issue_activity(

         # Save all the values to database
         issue_activities_created = IssueActivity.objects.bulk_create(issue_activities)
-        # Post the updates to segway for integrations and webhooks
-        if len(issue_activities_created):
-            for activity in issue_activities_created:
-                webhook_activity.delay(
-                    event=(
-                        "issue_comment"
-                        if activity.field == "comment"
-                        else "intake_issue"
-                        if intake
-                        else "issue"
-                    ),
-                    event_id=(
-                        activity.issue_comment_id
-                        if activity.field == "comment"
-                        else intake
-                        if intake
-                        else activity.issue_id
-                    ),
-                    verb=activity.verb,
-                    field=(
-                        "description" if activity.field == "comment" else activity.field
-                    ),
-                    old_value=(
-                        activity.old_value if activity.old_value != "" else None
-                    ),
-                    new_value=(
-                        activity.new_value if activity.new_value != "" else None
-                    ),
-                    actor_id=activity.actor_id,
-                    current_site=origin,
-                    slug=activity.workspace.slug,
-                    old_identifier=activity.old_identifier,
-                    new_identifier=activity.new_identifier,
-                )

         if notification:
             notifications.delay(
@@ -16,7 +16,7 @@ from plane.utils.exception_logger import log_exception


 @shared_task
-def magic_link(email, key, token, current_site):
+def magic_link(email, key, token):
     try:
         (
             EMAIL_HOST,

@@ -53,7 +53,7 @@ def magic_link(email, key, token, current_site):
         )
         msg.attach_alternative(html_content, "text/html")
         msg.send()
-        logging.getLogger("plane").info("Email sent successfully.")
+        logging.getLogger("plane.worker").info("Email sent successfully.")
         return
     except Exception as e:
         log_exception(e)


@@ -80,7 +80,7 @@ def project_add_user_email(current_site, project_member_id, invitor_id):
         # Send the email
         msg.send()
         # Log the success
-        logging.getLogger("plane").info("Email sent successfully.")
+        logging.getLogger("plane.worker").info("Email sent successfully.")
         return
     except Exception as e:
         log_exception(e)


@@ -76,7 +76,7 @@ def project_invitation(email, project_id, token, current_site, invitor):

         msg.attach_alternative(html_content, "text/html")
         msg.send()
-        logging.getLogger("plane").info("Email sent successfully.")
+        logging.getLogger("plane.worker").info("Email sent successfully.")
         return
     except (Project.DoesNotExist, ProjectMemberInvite.DoesNotExist):
         return


@@ -58,7 +58,7 @@ def user_activation_email(current_site, user_id):

         msg.attach_alternative(html_content, "text/html")
         msg.send()
-        logging.getLogger("plane").info("Email sent successfully.")
+        logging.getLogger("plane.worker").info("Email sent successfully.")
         return
     except Exception as e:
         log_exception(e)


@@ -60,7 +60,7 @@ def user_deactivation_email(current_site, user_id):
         # Attach HTML content
         msg.attach_alternative(html_content, "text/html")
         msg.send()
-        logging.getLogger("plane").info("Email sent successfully.")
+        logging.getLogger("plane.worker").info("Email sent successfully.")
         return
     except Exception as e:
         log_exception(e)