Mirror of https://github.com/makeplane/plane (synced 2025-08-07 19:59:33 +00:00)

Compare commits: fix/intake...refactor-e (185 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 5a1f8e6e39 | |
| | 16d63abcdc | |
| | 0568b8d583 | |
| | 64da29b0d9 | |
| | 7c336a65c4 | |
| | 2242a85e5c | |
| | 323920a358 | |
| | 151fc8389e | |
| | 0f828fd5e0 | |
| | 67cbe94d4a | |
| | 322af8c436 | |
| | 41c2aefad4 | |
| | 445c819fbd | |
| | 046a8a1bcf | |
| | 099a1cc12b | |
| | a0a697401b | |
| | cb92108bf4 | |
| | 01b685ea57 | |
| | b16a585102 | |
| | 4a97d7c28c | |
| | 141cb17e8a | |
| | 26b62c4a70 | |
| | e388a9a279 | |
| | a3a580923c | |
| | b4bc49971c | |
| | 04c7c53e09 | |
| | 78cc32765b | |
| | 4e485d6402 | |
| | 5a208cb1b9 | |
| | 0eafbb698a | |
| | 193ae9bfc8 | |
| | 7cb5a9120a | |
| | 84fc81dd98 | |
| | 2d0c0c7f8a | |
| | 5c9bdb1cea | |
| | f8ca1e46b1 | |
| | a3b9152a9b | |
| | 5223bd01e8 | |
| | 6eb0b5ddb0 | |
| | cd200169b6 | |
| | 037bb88b53 | |
| | 643390e723 | |
| | 731c4e8fcd | |
| | 6216ad77f4 | |
| | 9812129ad3 | |
| | 5226b17f90 | |
| | b376e5300a | |
| | 4460529b37 | |
| | 0a8cc24da5 | |
| | 2f4aa843fc | |
| | cfac8ce350 | |
| | 75a11ba31a | |
| | 1fc3709731 | |
| | 7e21618762 | |
| | 2d475491e9 | |
| | 2a2feaf88e | |
| | e48b2da623 | |
| | 9c9952a823 | |
| | 906ce8b500 | |
| | 6c483fad2f | |
| | 5b776392bd | |
| | ba158d5d6e | |
| | 084cc75726 | |
| | 534f5c7dd0 | |
| | 080cf70e3f | |
| | 4c3f7f27a5 | |
| | 803f6cc62a | |
| | 3a6d0c11fb | |
| | 75d81f9e95 | |
| | 0d5c7c6653 | |
| | 079c3a3a99 | |
| | 5f8d5ea388 | |
| | 8613a80b16 | |
| | dc16f2862e | |
| | e68d344410 | |
| | 26c8cba322 | |
| | b435ceedfc | |
| | 13c46e0fdf | |
| | 02bccb44d6 | |
| | b5634f5fa1 | |
| | 64aae0a2ac | |
| | a263bfc01f | |
| | 50082f0843 | |
| | 30db59534d | |
| | e401c9d6e4 | |
| | 39b5736c83 | |
| | 2785419d12 | |
| | ac5b974d67 | |
| | 14ebaf0799 | |
| | 7cdb622663 | |
| | 855e4a3218 | |
| | d456767492 | |
| | 6faff1d556 | |
| | bc2936dcd3 | |
| | d366ac1581 | |
| | 0a01e0eb41 | |
| | b4cc2d83fe | |
| | 42e2b787f0 | |
| | fbca9d9a7a | |
| | dbc00e4add | |
| | 28f9733d1b | |
| | 1e46290727 | |
| | 5a1df8b496 | |
| | f23a2f0780 | |
| | d10bb0b638 | |
| | c4ddff5419 | |
| | 10f5b4e9b8 | |
| | cdca5a4126 | |
| | 14dc6a56bc | |
| | 55340f9f48 | |
| | efa64fc4b8 | |
| | f5449c8f93 | |
| | baabb82669 | |
| | 298e3dc9ca | |
| | 190300bc6c | |
| | 550fe547e2 | |
| | f278a284c4 | |
| | 2bcf6c76cd | |
| | fb3e022042 | |
| | e3fbb7b073 | |
| | cce6dd581c | |
| | d86ac368a4 | |
| | 101994840a | |
| | f60f57ef11 | |
| | 546217f09b | |
| | 6df8323665 | |
| | 77d022df71 | |
| | 797f150ec4 | |
| | b54f54999e | |
| | dff176be8f | |
| | 2bbaaed3ea | |
| | b5ceb94fb2 | |
| | feb6243065 | |
| | 5dacba74c9 | |
| | 0efb0c239c | |
| | c8be836d6c | |
| | 833b82e247 | |
| | 280aa7f671 | |
| | eac1115566 | |
| | 8166a757a7 | |
| | be5d77d978 | |
| | 18fb3b8450 | |
| | ef5616905e | |
| | aeb41e603c | |
| | 55eea1a8b7 | |
| | fa87ff14b7 | |
| | 7d91b5f8df | |
| | 3ce40dfa2f | |
| | f65253c994 | |
| | 97fcfaa653 | |
| | 0e1ebff978 | |
| | 642dabfe35 | |
| | 48557cb670 | |
| | 608da1465c | |
| | dbcc7bedb4 | |
| | c401b26dd4 | |
| | a4bca0c39c | |
| | 24899887b2 | |
| | c6953ff878 | |
| | 06be9ab81b | |
| | ed8d00acb1 | |
| | 915e374485 | |
| | 1d5b93cebd | |
| | df65b8c34a | |
| | 4c688b1d25 | |
| | bfc6ed839f | |
| | b68396a4b2 | |
| | b4fc715aba | |
| | 33a1b916cb | |
| | 2818310619 | |
| | 882520b3c7 | |
| | 20132e7544 | |
| | 0ae57b49d2 | |
| | d347269afb | |
| | a3fd616ec4 | |
| | 9eeff158d5 | |
| | ef20b5814e | |
| | 14914e8716 | |
| | b738e39a4a | |
| | 993c7899b6 | |
| | 2b411de1e3 | |
| | 1f9222065e | |
| | 670134562f | |
| | 144c793e9e | |
| | 0a924e4824 | |
```diff
@@ -2,6 +2,7 @@
 *.pyc
 .env
 venv
 .venv
 node_modules/
+**/node_modules/
 npm-debug.log
@@ -14,4 +15,4 @@ build/
 out/
 **/out/
 dist/
 **/dist/
```
.github/workflows/build-branch.yml (2 changes, vendored)

```diff
@@ -273,7 +273,7 @@ jobs:
       run: |
         cp ./deploy/selfhost/install.sh deploy/selfhost/setup.sh
         sed -i 's/${APP_RELEASE:-stable}/${APP_RELEASE:-'${REL_VERSION}'}/g' deploy/selfhost/docker-compose.yml
-        sed -i 's/APP_RELEASE=stable/APP_RELEASE='${REL_VERSION}'/g' deploy/selfhost/variables.env
+        # sed -i 's/APP_RELEASE=stable/APP_RELEASE='${REL_VERSION}'/g' deploy/selfhost/variables.env

     - name: Create Release
       id: create_release
```
.gitignore (3 changes, vendored)

```diff
@@ -1,5 +1,6 @@
 node_modules
 .next
+.yarn

 ### NextJS ###
 # Dependencies
@@ -52,6 +53,8 @@ mediafiles
 .env
 .DS_Store
 logs/
+htmlcov/
+.coverage

 node_modules/
 assets/dist/
```
.yarnrc.yml (new file, 1 line)

```diff
@@ -0,0 +1 @@
+nodeLinker: node-modules
```
````diff
@@ -15,14 +15,33 @@ Without said minimal reproduction, we won't be able to investigate all [issues](

 You can open a new issue with this [issue form](https://github.com/makeplane/plane/issues/new).

+### Naming conventions for issues
+
+When opening a new issue, please use a clear and concise title that follows this format:
+
+- For bugs: `🐛 Bug: [short description]`
+- For features: `🚀 Feature: [short description]`
+- For improvements: `🛠️ Improvement: [short description]`
+- For documentation: `📘 Docs: [short description]`
+
+**Examples:**
+
+- `🐛 Bug: API token expiry time not saving correctly`
+- `📘 Docs: Clarify RAM requirement for local setup`
+- `🚀 Feature: Allow custom time selection for token expiration`
+
+This helps us triage and manage issues more efficiently.
+
 ## Projects setup and Architecture

 ### Requirements

-- Node.js version v16.18.0
+- Docker Engine installed and running
+- Node.js version 20+ [LTS version](https://nodejs.org/en/about/previous-releases)
 - Python version 3.8+
 - Postgres version v14
 - Redis version v6.2.7
+- **Memory**: Minimum **12 GB RAM** recommended
+
+> ⚠️ Running the project on a system with only 8 GB RAM may lead to setup failures or memory crashes (especially during Docker container build/start or dependency install). Use cloud environments like GitHub Codespaces or upgrade local RAM if possible.

 ### Setup the project
@@ -50,6 +69,17 @@ chmod +x setup.sh
 docker compose -f docker-compose-local.yml up
 ```

+5. Start web apps:
+
+   ```bash
+   yarn dev
+   ```
+
+6. Open your browser to http://localhost:3001/god-mode/ and register yourself as instance admin
+7. Open up your browser to http://localhost:3000 then log in using the same credentials from the previous step
+
 That’s it! You’re all set to begin coding. Remember to refresh your browser if changes don’t auto-reload. Happy contributing! 🎉

 ## Missing a Feature?

 If a feature is missing, you can directly _request_ a new one [here](https://github.com/makeplane/plane/issues/new?assignees=&labels=feature&template=feature_request.yml&title=%F0%9F%9A%80+Feature%3A+). You also can do the same by choosing "🚀 Feature" when raising a [New Issue](https://github.com/makeplane/plane/issues/new/choose) on our GitHub Repository.
````
````diff
@@ -75,7 +105,7 @@ To ensure consistency throughout the source code, please keep these rules in mind:
 - **Improve documentation** - fix incomplete or missing [docs](https://docs.plane.so/), bad wording, examples or explanations.

 ## Contributing to language support
 This guide is designed to help contributors understand how to add or update translations in the application.

 ### Understanding translation structure

@@ -90,7 +120,7 @@ packages/i18n/src/locales/
 ├── fr/
 │   └── translations.json
 └── [language]/
     └── translations.json
 ```
````

#### Nested structure

To keep translations organized, we use a nested structure for keys. This makes it easier to manage and locate specific translations. For example:
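The example block itself falls outside the diff hunks shown in this compare view. A minimal sketch of what such nested keys look like and how a component might address one by its dotted path (the key names below are hypothetical, not Plane's actual keys):

```typescript
// Hypothetical nested translation keys; the real entries live in translations.json.
const translations = {
  issue: {
    create: { title: "Create work item" },
    delete: { confirm: "Are you sure you want to delete this work item?" },
  },
} as const;

// Resolve a dotted path such as "issue.create.title" to its leaf string,
// falling back to the path itself when the key is missing.
const t = (path: string): string =>
  path.split(".").reduce((node: any, key) => node?.[key], translations) ?? path;

console.log(t("issue.create.title")); // "Create work item"
```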
````diff
@@ -110,14 +140,14 @@ To keep translations organized, we use a nested structure for keys. This makes it
 We use [IntlMessageFormat](https://formatjs.github.io/docs/intl-messageformat/) to handle dynamic content, such as variables and pluralization. Here's how to format your translations:

 #### Examples
 - **Simple variables**
   ```json
   {
     "greeting": "Hello, {name}!"
   }
   ```

 - **Pluralization**
   ```json
   {
     "items": "{count, plural, one {Work item} other {Work items}}"
   }
   ```
````
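For reference, this is how such message strings behave at runtime with the `intl-messageformat` package from FormatJS (a minimal sketch; Plane's i18n layer may wrap this API differently):

```typescript
import { IntlMessageFormat } from "intl-messageformat";

// Simple variable interpolation
const greeting = new IntlMessageFormat("Hello, {name}!", "en");
console.log(greeting.format({ name: "Ada" })); // "Hello, Ada!"

// Pluralization: the `plural` argument selects a branch based on `count`
const items = new IntlMessageFormat(
  "{count, plural, one {Work item} other {Work items}}",
  "en"
);
console.log(items.format({ count: 1 })); // "Work item"
console.log(items.format({ count: 3 })); // "Work items"
```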
````diff
@@ -142,15 +172,15 @@ We use [IntlMessageFormat](https://formatjs.github.io/docs/intl-messageformat/)
 ### Adding new languages
 Adding a new language involves several steps to ensure it integrates seamlessly with the project. Follow these instructions carefully:

 1. **Update type definitions**
    Add the new language to the TLanguage type in the language definitions file:

    ```typescript
    // types/language.ts
    export type TLanguage = "en" | "fr" | "your-lang";
    ```

 2. **Add language configuration**
    Include the new language in the list of supported languages:

    ```typescript
@@ -161,14 +191,14 @@ Include the new language in the list of supported languages:
    ];
    ```

 3. **Create translation files**
    1. Create a new folder for your language under locales (e.g., `locales/your-lang/`).
    2. Add a `translations.json` file inside the folder.
    3. Copy the structure from an existing translation file and translate all keys.

 4. **Update import logic**
    Modify the language import logic to include your new language:

    ```typescript
````
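The supported-languages array in step 2 is cut off by the hunk boundary above. A hypothetical sketch of what an entry might look like (the `label`/`value` field names are assumptions, not Plane's actual shape — check `packages/i18n` for the real definition):

```typescript
import type { TLanguage } from "./types/language"; // the type from step 1

// Hypothetical entry shape for the supported-languages list.
export const SUPPORTED_LANGUAGES: { label: string; value: TLanguage }[] = [
  { label: "English", value: "en" },
  { label: "Français", value: "fr" },
  { label: "Your language", value: "your-lang" },
];
```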
README.md (55 changes)

````diff
@@ -47,10 +47,10 @@ Meet [Plane](https://plane.so/), an open-source project management tool to track

 Getting started with Plane is simple. Choose the setup that works best for you:

 - **Plane Cloud**
   Sign up for a free account on [Plane Cloud](https://app.plane.so)—it's the fastest way to get up and running without worrying about infrastructure.

 - **Self-host Plane**
   Prefer full control over your data and infrastructure? Install and run Plane on your own servers. Follow our detailed [deployment guides](https://developers.plane.so/self-hosting/overview) to get started.

 | Installation methods | Docs link |
@@ -62,22 +62,22 @@ Prefer full control over your data and infrastructure? Install and run Plane on

 ## 🌟 Features

 - **Issues**
   Efficiently create and manage tasks with a robust rich text editor that supports file uploads. Enhance organization and tracking by adding sub-properties and referencing related issues.

 - **Cycles**
   Maintain your team’s momentum with Cycles. Track progress effortlessly using burn-down charts and other insightful tools.

 - **Modules**
   Simplify complex projects by dividing them into smaller, manageable modules.

 - **Views**
   Customize your workflow by creating filters to display only the most relevant issues. Save and share these views with ease.

 - **Pages**
   Capture and organize ideas using Plane Pages, complete with AI capabilities and a rich text editor. Format text, insert images, add hyperlinks, or convert your notes into actionable items.

 - **Analytics**
   Access real-time insights across all your Plane data. Visualize trends, remove blockers, and keep your projects moving forward.

 - **Drive** (_coming soon_): The drive helps you share documents, images, videos, or any other files that make sense to you or your team and align on the problem/solution.
@@ -85,38 +85,7 @@ Access real-time insights across all your Plane data. Visualize trends, remove b

 ## 🛠️ Local development

-### Pre-requisites
-- Ensure Docker Engine is installed and running.
-
-### Development setup
-Setting up your local environment is simple and straightforward. Follow these steps to get started:
-
-1. Clone the repository:
-   ```
-   git clone https://github.com/makeplane/plane.git
-   ```
-2. Navigate to the project folder:
-   ```
-   cd plane
-   ```
-3. Create a new branch for your feature or fix:
-   ```
-   git checkout -b <feature-branch-name>
-   ```
-4. Run the setup script in the terminal:
-   ```
-   ./setup.sh
-   ```
-5. Open the project in an IDE such as VS Code.
-6. Review the `.env` files in the relevant folders. Refer to [Environment Setup](./ENV_SETUP.md) for details on the environment variables used.
-7. Start the services using Docker:
-   ```
-   docker compose -f docker-compose-local.yml up -d
-   ```
-
-That’s it! You’re all set to begin coding. Remember to refresh your browser if changes don’t auto-reload. Happy contributing! 🎉
+See [CONTRIBUTING](./CONTRIBUTING.md)

 ## ⚙️ Built with
 [](https://nextjs.org/)
@@ -194,7 +163,7 @@ Feel free to ask questions, report bugs, participate in discussions, share ideas

 If you discover a security vulnerability in Plane, please report it responsibly instead of opening a public issue. We take all legitimate reports seriously and will investigate them promptly. See [Security policy](https://github.com/makeplane/plane/blob/master/SECURITY.md) for more info.

 To disclose any security issues, please email us at security@plane.so.

 ## 🤝 Contributing
@@ -219,4 +188,4 @@ Please read [CONTRIBUTING.md](https://github.com/makeplane/plane/blob/master/CON

 ## License
 This project is licensed under the [GNU Affero General Public License v3.0](https://github.com/makeplane/plane/blob/master/LICENSE.txt).
````
```diff
@@ -1,3 +1,12 @@
-NEXT_PUBLIC_API_BASE_URL=""
+NEXT_PUBLIC_API_BASE_URL="http://localhost:8000"
+
+NEXT_PUBLIC_WEB_BASE_URL="http://localhost:3000"
+
+NEXT_PUBLIC_ADMIN_BASE_URL="http://localhost:3001"
 NEXT_PUBLIC_ADMIN_BASE_PATH="/god-mode"
-NEXT_PUBLIC_WEB_BASE_URL=""
+
+NEXT_PUBLIC_SPACE_BASE_URL="http://localhost:3002"
 NEXT_PUBLIC_SPACE_BASE_PATH="/spaces"
+
+NEXT_PUBLIC_LIVE_BASE_URL="http://localhost:3100"
+NEXT_PUBLIC_LIVE_BASE_PATH="/live"
```
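For context on the `NEXT_PUBLIC_` prefix in the block above: Next.js inlines only variables with that prefix into client-side bundles at build time. A minimal sketch of reading them (the helper below is illustrative, not Plane's actual code):

```typescript
// Read at build time; an empty string effectively falls back to same-origin requests.
export const API_BASE_URL: string = process.env.NEXT_PUBLIC_API_BASE_URL ?? "";

// Illustrative helper for composing API URLs against the configured base.
export const apiUrl = (path: string): string => `${API_BASE_URL}${path}`;

// With the values above: apiUrl("/api/instances/") === "http://localhost:8000/api/instances/"
```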
```diff
@@ -26,16 +26,16 @@ export const InstanceAIForm: FC<IInstanceAIForm> = (props) => {
     formState: { errors, isSubmitting },
   } = useForm<AIFormValues>({
     defaultValues: {
-      OPENAI_API_KEY: config["OPENAI_API_KEY"],
-      GPT_ENGINE: config["GPT_ENGINE"],
+      LLM_API_KEY: config["LLM_API_KEY"],
+      LLM_MODEL: config["LLM_MODEL"],
     },
   });

   const aiFormFields: TControllerInputFormField[] = [
     {
-      key: "GPT_ENGINE",
+      key: "LLM_MODEL",
       type: "text",
-      label: "GPT_ENGINE",
+      label: "LLM Model",
       description: (
         <>
           Choose an OpenAI engine.{" "}
@@ -49,12 +49,12 @@ export const InstanceAIForm: FC<IInstanceAIForm> = (props) => {
         </a>
       </>
     ),
-    placeholder: "gpt-3.5-turbo",
-    error: Boolean(errors.GPT_ENGINE),
+    placeholder: "gpt-4o-mini",
+    error: Boolean(errors.LLM_MODEL),
     required: false,
   },
   {
-    key: "OPENAI_API_KEY",
+    key: "LLM_API_KEY",
     type: "password",
     label: "API key",
     description: (
@@ -71,7 +71,7 @@ export const InstanceAIForm: FC<IInstanceAIForm> = (props) => {
       </>
     ),
     placeholder: "sk-asddassdfasdefqsdfasd23das3dasdcasd",
-    error: Boolean(errors.OPENAI_API_KEY),
+    error: Boolean(errors.LLM_API_KEY),
     required: false,
   },
 ];
```
```diff
@@ -98,11 +98,7 @@ export const InstanceGithubConfigForm: FC<Props> = (props) => {
     key: "GITHUB_ORGANIZATION_ID",
     type: "text",
     label: "Organization ID",
-    description: (
-      <>
-        The organization github ID.
-      </>
-    ),
+    description: <>The organization github ID.</>,
     placeholder: "123456789",
     error: Boolean(errors.GITHUB_ORGANIZATION_ID),
     required: false,
```
```diff
@@ -3,18 +3,16 @@
 import { ReactNode } from "react";
 import { ThemeProvider, useTheme } from "next-themes";
 import { SWRConfig } from "swr";
-// ui
+// plane imports
 import { ADMIN_BASE_PATH, DEFAULT_SWR_CONFIG } from "@plane/constants";
 import { Toast } from "@plane/ui";
 import { resolveGeneralTheme } from "@plane/utils";
-// constants
-// helpers
 // lib
 import { InstanceProvider } from "@/lib/instance-provider";
 import { StoreProvider } from "@/lib/store-provider";
 import { UserProvider } from "@/lib/user-provider";
 // styles
-import "./globals.css";
+import "@/styles/globals.css";

 const ToastWithTheme = () => {
   const { resolvedTheme } = useTheme();
```
```diff
@@ -7,7 +7,7 @@ import { LogOut, UserCog2, Palette } from "lucide-react";
 import { Menu, Transition } from "@headlessui/react";
 // plane internal packages
 import { API_BASE_URL } from "@plane/constants";
-import {AuthService } from "@plane/services";
+import { AuthService } from "@plane/services";
 import { Avatar } from "@plane/ui";
 import { getFileURL, cn } from "@plane/utils";
 // hooks
```
```diff
@@ -2,7 +2,7 @@ import set from "lodash/set";
 import { observable, action, computed, makeObservable, runInAction } from "mobx";
 // plane internal packages
 import { EInstanceStatus, TInstanceStatus } from "@plane/constants";
-import {InstanceService} from "@plane/services";
+import { InstanceService } from "@plane/services";
 import {
   IInstance,
   IInstanceAdmin,
```
```diff
@@ -1 +1 @@
 export * from "ce/components/authentication/authentication-modes";
```
```diff
@@ -1,7 +1,7 @@
 {
   "name": "admin",
   "description": "Admin UI for Plane",
-  "version": "0.25.3",
+  "version": "0.26.0",
   "license": "AGPL-3.0",
   "private": true,
   "scripts": {
@@ -10,6 +10,7 @@
     "build": "next build",
     "preview": "next build && next start",
     "start": "next start",
+    "format": "prettier --write .",
     "lint": "eslint . --ext .ts,.tsx",
     "lint:errors": "eslint . --ext .ts,.tsx --quiet"
   },
@@ -17,10 +18,11 @@
     "@headlessui/react": "^1.7.19",
     "@plane/constants": "*",
     "@plane/hooks": "*",
+    "@plane/propel": "*",
+    "@plane/services": "*",
     "@plane/types": "*",
     "@plane/ui": "*",
     "@plane/utils": "*",
-    "@plane/services": "*",
     "@tailwindcss/typography": "^0.5.9",
     "@types/lodash": "^4.17.0",
     "autoprefixer": "10.4.14",
@@ -29,7 +31,7 @@
     "lucide-react": "^0.469.0",
     "mobx": "^6.12.0",
     "mobx-react": "^9.1.1",
-    "next": "^14.2.26",
+    "next": "^14.2.29",
     "next-themes": "^0.2.1",
     "postcss": "^8.4.38",
     "react": "^18.3.1",
```
```diff
@@ -1,8 +1,2 @@
-module.exports = {
-  plugins: {
-    "postcss-import": {},
-    "tailwindcss/nesting": {},
-    tailwindcss: {},
-    autoprefixer: {},
-  },
-};
+// eslint-disable-next-line @typescript-eslint/no-require-imports
+module.exports = require("@plane/tailwind-config/postcss.config.js");
```
```diff
@@ -1,5 +1,4 @@
-@import url("https://fonts.googleapis.com/css2?family=Inter:wght@200;300;400;500;600;700;800&display=swap");
-@import url("https://fonts.googleapis.com/css2?family=Material+Symbols+Rounded:opsz,wght,FILL,GRAD@48,400,0,0&display=swap");
+@import "@plane/propel/styles/fonts";

 @tailwind base;
 @tailwind components;
```
```diff
@@ -60,23 +59,31 @@
   --color-border-300: 212, 212, 212; /* strong border- 1 */
   --color-border-400: 185, 185, 185; /* strong border- 2 */

-  --color-shadow-2xs: 0px 0px 1px 0px rgba(23, 23, 23, 0.06), 0px 1px 2px 0px rgba(23, 23, 23, 0.06),
+  --color-shadow-2xs:
+    0px 0px 1px 0px rgba(23, 23, 23, 0.06), 0px 1px 2px 0px rgba(23, 23, 23, 0.06),
     0px 1px 2px 0px rgba(23, 23, 23, 0.14);
-  --color-shadow-xs: 0px 1px 2px 0px rgba(0, 0, 0, 0.16), 0px 2px 4px 0px rgba(16, 24, 40, 0.12),
+  --color-shadow-xs:
+    0px 1px 2px 0px rgba(0, 0, 0, 0.16), 0px 2px 4px 0px rgba(16, 24, 40, 0.12),
     0px 1px 8px -1px rgba(16, 24, 40, 0.1);
-  --color-shadow-sm: 0px 1px 4px 0px rgba(0, 0, 0, 0.01), 0px 4px 8px 0px rgba(0, 0, 0, 0.02),
-    0px 1px 12px 0px rgba(0, 0, 0, 0.12);
+  --color-shadow-sm:
+    0px 1px 4px 0px rgba(0, 0, 0, 0.01), 0px 4px 8px 0px rgba(0, 0, 0, 0.02), 0px 1px 12px 0px rgba(0, 0, 0, 0.12);
-  --color-shadow-rg: 0px 3px 6px 0px rgba(0, 0, 0, 0.1), 0px 4px 4px 0px rgba(16, 24, 40, 0.08),
+  --color-shadow-rg:
+    0px 3px 6px 0px rgba(0, 0, 0, 0.1), 0px 4px 4px 0px rgba(16, 24, 40, 0.08),
     0px 1px 12px 0px rgba(16, 24, 40, 0.04);
-  --color-shadow-md: 0px 4px 8px 0px rgba(0, 0, 0, 0.12), 0px 6px 12px 0px rgba(16, 24, 40, 0.12),
+  --color-shadow-md:
+    0px 4px 8px 0px rgba(0, 0, 0, 0.12), 0px 6px 12px 0px rgba(16, 24, 40, 0.12),
     0px 1px 16px 0px rgba(16, 24, 40, 0.12);
-  --color-shadow-lg: 0px 6px 12px 0px rgba(0, 0, 0, 0.12), 0px 8px 16px 0px rgba(0, 0, 0, 0.12),
+  --color-shadow-lg:
+    0px 6px 12px 0px rgba(0, 0, 0, 0.12), 0px 8px 16px 0px rgba(0, 0, 0, 0.12),
     0px 1px 24px 0px rgba(16, 24, 40, 0.12);
-  --color-shadow-xl: 0px 0px 18px 0px rgba(0, 0, 0, 0.16), 0px 0px 24px 0px rgba(16, 24, 40, 0.16),
+  --color-shadow-xl:
+    0px 0px 18px 0px rgba(0, 0, 0, 0.16), 0px 0px 24px 0px rgba(16, 24, 40, 0.16),
     0px 0px 52px 0px rgba(16, 24, 40, 0.16);
-  --color-shadow-2xl: 0px 8px 16px 0px rgba(0, 0, 0, 0.12), 0px 12px 24px 0px rgba(16, 24, 40, 0.12),
+  --color-shadow-2xl:
+    0px 8px 16px 0px rgba(0, 0, 0, 0.12), 0px 12px 24px 0px rgba(16, 24, 40, 0.12),
     0px 1px 32px 0px rgba(16, 24, 40, 0.12);
-  --color-shadow-3xl: 0px 12px 24px 0px rgba(0, 0, 0, 0.12), 0px 16px 32px 0px rgba(0, 0, 0, 0.12),
+  --color-shadow-3xl:
+    0px 12px 24px 0px rgba(0, 0, 0, 0.12), 0px 16px 32px 0px rgba(0, 0, 0, 0.12),
     0px 1px 48px 0px rgba(16, 24, 40, 0.12);
   --color-shadow-4xl: 0px 8px 40px 0px rgba(0, 0, 61, 0.05), 0px 12px 32px -16px rgba(0, 0, 0, 0.05);
```
```diff
@@ -1,13 +1,19 @@
 {
   "extends": "@plane/typescript-config/nextjs.json",
   "compilerOptions": {
-    "plugins": [{ "name": "next" }],
+    "plugins": [
+      {
+        "name": "next"
+      }
+    ],
     "baseUrl": ".",
     "paths": {
       "@/*": ["core/*"],
       "@/public/*": ["public/*"],
-      "@/plane-admin/*": ["ce/*"]
-    }
+      "@/plane-admin/*": ["ce/*"],
+      "@/styles/*": ["styles/*"]
+    },
+    "strictNullChecks": true
   },
   "include": ["next-env.d.ts", "next.config.js", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
   "exclude": ["node_modules"]
```
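The practical effect of the new `@/styles/*` entry: imports like the one below (seen earlier in the `app/providers.tsx` diff) resolve relative to the package's `baseUrl` rather than to a relative path:

```typescript
// Resolves to ./styles/globals.css via the "@/styles/*" path alias in tsconfig.json.
import "@/styles/globals.css";
```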
apiserver/.coveragerc (new file, 25 lines)

```diff
@@ -0,0 +1,25 @@
+[run]
+source = plane
+omit =
+    */tests/*
+    */migrations/*
+    */settings/*
+    */wsgi.py
+    */asgi.py
+    */urls.py
+    manage.py
+    */admin.py
+    */apps.py
+
+[report]
+exclude_lines =
+    pragma: no cover
+    def __repr__
+    if self.debug:
+    raise NotImplementedError
+    if __name__ == .__main__.
+    pass
+    raise ImportError
+
+[html]
+directory = htmlcov
```
```diff
@@ -1,7 +1,7 @@
 # Backend
 # Debug value for api server use it as 0 for production use
 DEBUG=0
-CORS_ALLOWED_ORIGINS="http://localhost"
+CORS_ALLOWED_ORIGINS="http://localhost:3000,http://localhost:3001,http://localhost:3002,http://localhost:3100"

 # Database Settings
 POSTGRES_USER="plane"
@@ -27,7 +27,7 @@ RABBITMQ_VHOST="plane"
 AWS_REGION=""
 AWS_ACCESS_KEY_ID="access-key"
 AWS_SECRET_ACCESS_KEY="secret-key"
-AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
+AWS_S3_ENDPOINT_URL="http://localhost:9000"
 # Changing this requires change in the nginx.conf for uploads if using minio setup
 AWS_S3_BUCKET_NAME="uploads"
 # Maximum file upload limit
@@ -37,22 +37,31 @@ FILE_SIZE_LIMIT=5242880
 DOCKERIZED=1 # deprecated

 # set to 1 If using the pre-configured minio setup
-USE_MINIO=1
+USE_MINIO=0

 # Nginx Configuration
 NGINX_PORT=80

 # Email redirections and minio domain settings
-WEB_URL="http://localhost"
+WEB_URL="http://localhost:8000"

 # Gunicorn Workers
 GUNICORN_WORKERS=2

 # Base URLs
-ADMIN_BASE_URL=
-SPACE_BASE_URL=
-APP_BASE_URL=
+ADMIN_BASE_URL="http://localhost:3001"
+ADMIN_BASE_PATH="/god-mode"
+
+SPACE_BASE_URL="http://localhost:3002"
+SPACE_BASE_PATH="/spaces"
+
+APP_BASE_URL="http://localhost:3000"
+APP_BASE_PATH=""
+
+LIVE_BASE_URL="http://localhost:3100"
+LIVE_BASE_PATH="/live"
+
+LIVE_SERVER_SECRET_KEY="secret-key"

 # Hard delete files after days
 HARD_DELETE_AFTER_DAYS=60
```
```diff
@@ -1,6 +1,6 @@
 {
   "name": "plane-api",
-  "version": "0.25.3",
+  "version": "0.26.0",
   "license": "AGPL-3.0",
   "private": true,
   "description": "API server powering Plane's backend"
```
```diff
@@ -15,4 +15,4 @@ from .state import StateLiteSerializer, StateSerializer
 from .cycle import CycleSerializer, CycleIssueSerializer, CycleLiteSerializer
 from .module import ModuleSerializer, ModuleIssueSerializer, ModuleLiteSerializer
 from .intake import IntakeIssueSerializer
 from .estimate import EstimatePointSerializer
```
```diff
@@ -39,12 +39,15 @@ class CycleSerializer(BaseSerializer):
             data.get("start_date", None) is not None
             and data.get("end_date", None) is not None
         ):
-            project_id = self.initial_data.get("project_id") or self.instance.project_id
-            is_start_date_end_date_equal = (
-                True
-                if str(data.get("start_date")) == str(data.get("end_date"))
-                else False
+            project_id = self.initial_data.get("project_id") or (
+                self.instance.project_id
+                if self.instance and hasattr(self.instance, "project_id")
+                else None
             )

+            if not project_id:
+                raise serializers.ValidationError("Project ID is required")
+
             data["start_date"] = convert_to_utc(
                 date=str(data.get("start_date").date()),
                 project_id=project_id,
@@ -53,7 +56,6 @@ class CycleSerializer(BaseSerializer):
             data["end_date"] = convert_to_utc(
                 date=str(data.get("end_date", None).date()),
                 project_id=project_id,
-                is_start_date_end_date_equal=is_start_date_end_date_equal,
             )
             return data
```
```diff
@@ -160,12 +160,15 @@ class IssueSerializer(BaseSerializer):
         else:
             try:
                 # Then assign it to default assignee, if it is a valid assignee
-                if default_assignee_id is not None and ProjectMember.objects.filter(
-                    member_id=default_assignee_id,
-                    project_id=project_id,
-                    role__gte=15,
-                    is_active=True
-                ).exists():
+                if (
+                    default_assignee_id is not None
+                    and ProjectMember.objects.filter(
+                        member_id=default_assignee_id,
+                        project_id=project_id,
+                        role__gte=15,
+                        is_active=True,
+                    ).exists()
+                ):
                     IssueAssignee.objects.create(
                         assignee_id=default_assignee_id,
                         issue=issue,
```
```diff
@@ -141,8 +141,10 @@ class CycleAPIEndpoint(BaseAPIView):
         if pk:
             queryset = self.get_queryset().filter(archived_at__isnull=True).get(pk=pk)
             data = CycleSerializer(
-                queryset, fields=self.fields,
-                expand=self.expand, context={"project": project}
+                queryset,
+                fields=self.fields,
+                expand=self.expand,
+                context={"project": project},
             ).data
             return Response(data, status=status.HTTP_200_OK)
         queryset = self.get_queryset().filter(archived_at__isnull=True)
@@ -154,8 +156,11 @@ class CycleAPIEndpoint(BaseAPIView):
                 start_date__lte=timezone.now(), end_date__gte=timezone.now()
             )
             data = CycleSerializer(
-                queryset, many=True, fields=self.fields,
-                expand=self.expand, context={"project": project}
+                queryset,
+                many=True,
+                fields=self.fields,
+                expand=self.expand,
+                context={"project": project},
             ).data
             return Response(data, status=status.HTTP_200_OK)

@@ -166,8 +171,11 @@ class CycleAPIEndpoint(BaseAPIView):
                 request=request,
                 queryset=(queryset),
                 on_results=lambda cycles: CycleSerializer(
-                    cycles, many=True, fields=self.fields,
-                    expand=self.expand, context={"project": project}
+                    cycles,
+                    many=True,
+                    fields=self.fields,
+                    expand=self.expand,
+                    context={"project": project},
                 ).data,
             )
@@ -178,8 +186,11 @@ class CycleAPIEndpoint(BaseAPIView):
                 request=request,
                 queryset=(queryset),
                 on_results=lambda cycles: CycleSerializer(
-                    cycles, many=True, fields=self.fields,
-                    expand=self.expand, context={"project": project}
+                    cycles,
+                    many=True,
+                    fields=self.fields,
+                    expand=self.expand,
+                    context={"project": project},
                 ).data,
             )
@@ -190,8 +201,11 @@ class CycleAPIEndpoint(BaseAPIView):
                 request=request,
                 queryset=(queryset),
                 on_results=lambda cycles: CycleSerializer(
-                    cycles, many=True, fields=self.fields,
-                    expand=self.expand, context={"project": project}
+                    cycles,
+                    many=True,
+                    fields=self.fields,
+                    expand=self.expand,
+                    context={"project": project},
                 ).data,
             )
@@ -204,16 +218,22 @@ class CycleAPIEndpoint(BaseAPIView):
                 request=request,
                 queryset=(queryset),
                 on_results=lambda cycles: CycleSerializer(
-                    cycles, many=True, fields=self.fields,
-                    expand=self.expand, context={"project": project}
+                    cycles,
+                    many=True,
+                    fields=self.fields,
+                    expand=self.expand,
+                    context={"project": project},
                 ).data,
             )
         return self.paginate(
             request=request,
             queryset=(queryset),
             on_results=lambda cycles: CycleSerializer(
-                cycles, many=True, fields=self.fields,
-                expand=self.expand, context={"project": project}
+                cycles,
+                many=True,
+                fields=self.fields,
+                expand=self.expand,
+                context={"project": project},
             ).data,
         )
@@ -768,6 +788,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
                     issue_cycle__issue__archived_at__isnull=True,
                     issue_cycle__issue__is_draft=False,
                     issue_cycle__deleted_at__isnull=True,
+                    issue_cycle__issue__deleted_at__isnull=True,
                 ),
             )
         )
@@ -779,6 +800,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
                     issue_cycle__issue__archived_at__isnull=True,
                     issue_cycle__issue__is_draft=False,
                     issue_cycle__deleted_at__isnull=True,
+                    issue_cycle__issue__deleted_at__isnull=True,
                 ),
             )
         )
@@ -827,6 +849,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
                 )
             )
         )
+        old_cycle = old_cycle.first()

         estimate_type = Project.objects.filter(
             workspace__slug=slug,
@@ -946,7 +969,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
             )

             estimate_completion_chart = burndown_plot(
-                queryset=old_cycle.first(),
+                queryset=old_cycle,
                 slug=slug,
                 project_id=project_id,
                 plot_type="points",
@@ -1094,7 +1117,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):

         # Pass the new_cycle queryset to burndown_plot
         completion_chart = burndown_plot(
-            queryset=old_cycle.first(),
+            queryset=old_cycle,
             slug=slug,
             project_id=project_id,
             plot_type="issues",
@@ -1106,12 +1129,12 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
         ).first()

         current_cycle.progress_snapshot = {
-            "total_issues": old_cycle.first().total_issues,
-            "completed_issues": old_cycle.first().completed_issues,
-            "cancelled_issues": old_cycle.first().cancelled_issues,
-            "started_issues": old_cycle.first().started_issues,
-            "unstarted_issues": old_cycle.first().unstarted_issues,
-            "backlog_issues": old_cycle.first().backlog_issues,
+            "total_issues": old_cycle.total_issues,
+            "completed_issues": old_cycle.completed_issues,
+            "cancelled_issues": old_cycle.cancelled_issues,
+            "started_issues": old_cycle.started_issues,
+            "unstarted_issues": old_cycle.unstarted_issues,
+            "backlog_issues": old_cycle.backlog_issues,
             "distribution": {
                 "labels": label_distribution_data,
                 "assignees": assignee_distribution_data,
```
```diff
@@ -20,6 +20,7 @@ from plane.bgtasks.issue_activities_task import issue_activity
 from plane.db.models import Intake, IntakeIssue, Issue, Project, ProjectMember, State
 from plane.utils.host import base_host
 from .base import BaseAPIView
+from plane.db.models.intake import SourceType


 class IntakeIssueAPIEndpoint(BaseAPIView):
@@ -125,7 +126,7 @@ class IntakeIssueAPIEndpoint(BaseAPIView):
             intake_id=intake.id,
             project_id=project_id,
             issue=issue,
-            source=request.data.get("source", "IN-APP"),
+            source=SourceType.IN_APP,
         )
         # Create an Issue Activity
         issue_activity.delay(
```
```diff
@@ -57,6 +57,7 @@ from plane.settings.storage import S3Storage
 from plane.bgtasks.storage_metadata_task import get_asset_object_metadata
 from .base import BaseAPIView
 from plane.utils.host import base_host
+from plane.bgtasks.webhook_task import model_activity


 class WorkspaceIssueAPIEndpoint(BaseAPIView):
@@ -322,6 +323,17 @@ class IssueAPIEndpoint(BaseAPIView):
                 current_instance=None,
                 epoch=int(timezone.now().timestamp()),
             )
+
+            # Send the model activity
+            model_activity.delay(
+                model_name="issue",
+                model_id=str(serializer.data["id"]),
+                requested_data=request.data,
+                current_instance=None,
+                actor_id=request.user.id,
+                slug=slug,
+                origin=base_host(request=request, is_app=True),
+            )
             return Response(serializer.data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
```
```diff
@@ -32,6 +32,7 @@ from plane.bgtasks.webhook_task import model_activity, webhook_activity
 from .base import BaseAPIView
+from plane.utils.host import base_host


 class ProjectAPIEndpoint(BaseAPIView):
     """Project Endpoints to create, update, list, retrieve and delete endpoint"""

@@ -171,14 +172,14 @@ class ProjectAPIEndpoint(BaseAPIView):
                 states = [
                     {
                         "name": "Backlog",
-                        "color": "#A3A3A3",
+                        "color": "#60646C",
                         "sequence": 15000,
                         "group": "backlog",
                         "default": True,
                     },
                     {
                         "name": "Todo",
-                        "color": "#3A3A3A",
+                        "color": "#60646C",
                         "sequence": 25000,
                         "group": "unstarted",
                     },
@@ -190,13 +191,13 @@ class ProjectAPIEndpoint(BaseAPIView):
                     },
                     {
                         "name": "Done",
-                        "color": "#16A34A",
+                        "color": "#46A758",
                         "sequence": 45000,
                         "group": "completed",
                     },
                     {
                         "name": "Cancelled",
-                        "color": "#EF4444",
+                        "color": "#9AA4BC",
                         "sequence": 55000,
                         "group": "cancelled",
                     },
@@ -238,7 +239,7 @@ class ProjectAPIEndpoint(BaseAPIView):
             if "already exists" in str(e):
                 return Response(
                     {"name": "The project name is already taken"},
-                    status=status.HTTP_410_GONE,
+                    status=status.HTTP_409_CONFLICT,
                 )
         except Workspace.DoesNotExist:
             return Response(
@@ -247,7 +248,7 @@ class ProjectAPIEndpoint(BaseAPIView):
         except ValidationError:
             return Response(
                 {"identifier": "The project identifier is already taken"},
-                status=status.HTTP_410_GONE,
+                status=status.HTTP_409_CONFLICT,
             )

     def patch(self, request, slug, pk):
@@ -305,7 +306,7 @@ class ProjectAPIEndpoint(BaseAPIView):
             if "already exists" in str(e):
                 return Response(
                     {"name": "The project name is already taken"},
-                    status=status.HTTP_410_GONE,
+                    status=status.HTTP_409_CONFLICT,
                 )
         except (Project.DoesNotExist, Workspace.DoesNotExist):
             return Response(
@@ -314,7 +315,7 @@ class ProjectAPIEndpoint(BaseAPIView):
         except ValidationError:
             return Response(
                 {"identifier": "The project identifier is already taken"},
-                status=status.HTTP_410_GONE,
+                status=status.HTTP_409_CONFLICT,
             )

     def delete(self, request, slug, pk):
```
```diff
@@ -1,5 +1,7 @@
 from .base import BaseSerializer
 from plane.db.models import APIToken, APIActivityLog
+from rest_framework import serializers
+from django.utils import timezone


 class APITokenSerializer(BaseSerializer):
@@ -17,10 +19,17 @@ class APITokenSerializer(BaseSerializer):


 class APITokenReadSerializer(BaseSerializer):
+    is_active = serializers.SerializerMethodField()
+
     class Meta:
         model = APIToken
         exclude = ("token",)

+    def get_is_active(self, obj: APIToken) -> bool:
+        if obj.expired_at is None:
+            return True
+        return timezone.now() < obj.expired_at
+

 class APIActivityLogSerializer(BaseSerializer):
     class Meta:
```
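The new `is_active` field above is computed from `expired_at`: tokens with no expiry are always active, otherwise active until the expiry timestamp. A hedged TypeScript sketch of consuming it client-side (the interface is an assumption; only `expired_at` and `is_active` appear in this diff):

```typescript
// Hypothetical token shape; fields beyond expired_at/is_active are assumed.
interface APIToken {
  id: string;
  expired_at: string | null;
  is_active: boolean;
}

// Keep only tokens the serializer reports as still usable.
const usableTokens = (tokens: APIToken[]): APIToken[] =>
  tokens.filter((t) => t.is_active);
```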
```diff
@@ -25,11 +25,6 @@ class CycleWriteSerializer(BaseSerializer):
             or (self.instance and self.instance.project_id)
             or self.context.get("project_id", None)
         )
-        is_start_date_end_date_equal = (
-            True
-            if str(data.get("start_date")) == str(data.get("end_date"))
-            else False
-        )
         data["start_date"] = convert_to_utc(
             date=str(data.get("start_date").date()),
             project_id=project_id,
@@ -38,7 +33,6 @@ class CycleWriteSerializer(BaseSerializer):
         data["end_date"] = convert_to_utc(
             date=str(data.get("end_date", None).date()),
             project_id=project_id,
-            is_start_date_end_date_equal=is_start_date_end_date_equal,
         )
         return data
```
```diff
@@ -53,6 +53,7 @@ def get_entity_model_and_serializer(entity_type):
     }
     return entity_map.get(entity_type, (None, None))
+

 class UserFavoriteSerializer(serializers.ModelSerializer):
     entity_data = serializers.SerializerMethodField()
```
```diff
@@ -352,8 +352,19 @@ class IssueRelationSerializer(BaseSerializer):
             "state_id",
             "priority",
             "assignee_ids",
+            "created_by",
+            "created_at",
+            "updated_at",
+            "updated_by",
         ]
-        read_only_fields = ["workspace", "project"]
+        read_only_fields = [
+            "workspace",
+            "project",
+            "created_by",
+            "created_at",
+            "updated_by",
+            "updated_at",
+        ]


 class RelatedIssueSerializer(BaseSerializer):
@@ -383,8 +394,19 @@ class RelatedIssueSerializer(BaseSerializer):
             "state_id",
             "priority",
             "assignee_ids",
+            "created_by",
+            "created_at",
+            "updated_by",
+            "updated_at",
         ]
-        read_only_fields = ["workspace", "project"]
+        read_only_fields = [
+            "workspace",
+            "project",
+            "created_by",
+            "created_at",
+            "updated_by",
+            "updated_at",
+        ]


 class IssueAssigneeSerializer(BaseSerializer):
```
```diff
@@ -148,10 +148,13 @@ class ProjectMemberAdminSerializer(BaseSerializer):
         fields = "__all__"


 class ProjectMemberRoleSerializer(DynamicBaseSerializer):
+    original_role = serializers.IntegerField(source='role', read_only=True)
+
     class Meta:
         model = ProjectMember
-        fields = ("id", "role", "member", "project")
+        fields = ("id", "role", "member", "project", "original_role", "created_at")
+        read_only_fields = ["original_role", "created_at"]


 class ProjectMemberInviteSerializer(BaseSerializer):
```
```diff
@@ -1,11 +1,13 @@
 # Module imports
 from .base import BaseSerializer

+from rest_framework import serializers
+
 from plane.db.models import State


 class StateSerializer(BaseSerializer):
+    order = serializers.FloatField(required=False)
+
     class Meta:
         model = State
         fields = [
@@ -18,6 +20,7 @@ class StateSerializer(BaseSerializer):
             "default",
             "description",
             "sequence",
+            "order",
         ]
         read_only_fields = ["workspace", "project"]
```
```diff
@@ -3,11 +3,22 @@ from rest_framework import serializers

 # Module import
 from plane.db.models import Account, Profile, User, Workspace, WorkspaceMemberInvite
+from plane.utils.url import contains_url

 from .base import BaseSerializer


 class UserSerializer(BaseSerializer):
+    def validate_first_name(self, value):
+        if contains_url(value):
+            raise serializers.ValidationError("First name cannot contain a URL.")
+        return value
+
+    def validate_last_name(self, value):
+        if contains_url(value):
+            raise serializers.ValidationError("Last name cannot contain a URL.")
+        return value
+
     class Meta:
         model = User
         # Exclude password field from the serializer
@@ -99,11 +110,16 @@ class UserMeSettingsSerializer(BaseSerializer):
                 workspace_member__member=obj.id,
                 workspace_member__is_active=True,
             ).first()
+            logo_asset_url = workspace.logo_asset.asset_url if workspace.logo_asset is not None else ""
             return {
                 "last_workspace_id": profile.last_workspace_id,
                 "last_workspace_slug": (
                     workspace.slug if workspace is not None else ""
                 ),
+                "last_workspace_name": (
+                    workspace.name if workspace is not None else ""
+                ),
+                "last_workspace_logo": (logo_asset_url),
                 "fallback_workspace_id": profile.last_workspace_id,
                 "fallback_workspace_slug": (
                     workspace.slug if workspace is not None else ""
```
```diff
@@ -25,10 +25,12 @@ from plane.db.models import (
     WorkspaceUserPreference,
 )
 from plane.utils.constants import RESTRICTED_WORKSPACE_SLUGS
+from plane.utils.url import contains_url

 # Django imports
 from django.core.validators import URLValidator
 from django.core.exceptions import ValidationError
+import re


 class WorkSpaceSerializer(DynamicBaseSerializer):
@@ -36,10 +38,21 @@ class WorkSpaceSerializer(DynamicBaseSerializer):
     logo_url = serializers.CharField(read_only=True)
     role = serializers.IntegerField(read_only=True)

+    def validate_name(self, value):
+        # Check if the name contains a URL
+        if contains_url(value):
+            raise serializers.ValidationError("Name must not contain URLs")
+        return value
+
     def validate_slug(self, value):
         # Check if the slug is restricted
         if value in RESTRICTED_WORKSPACE_SLUGS:
             raise serializers.ValidationError("Slug is not valid")
+        # Slug should only contain alphanumeric characters, hyphens, and underscores
+        if not re.match(r"^[a-zA-Z0-9_-]+$", value):
+            raise serializers.ValidationError(
+                "Slug can only contain letters, numbers, hyphens (-), and underscores (_)"
+            )
         return value

     class Meta:
```
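The slug rule added above (letters, numbers, hyphens, underscores) is easy to mirror client-side for instant feedback; a hypothetical helper (the server-side validator remains authoritative):

```typescript
// Same pattern as the server-side check: letters, numbers, hyphens, underscores.
const WORKSPACE_SLUG_PATTERN = /^[a-zA-Z0-9_-]+$/;

export const isValidWorkspaceSlug = (slug: string): boolean =>
  WORKSPACE_SLUG_PATTERN.test(slug);
```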
```diff
@@ -148,7 +161,6 @@ class WorkspaceUserLinkSerializer(BaseSerializer):

         return value

-
     def create(self, validated_data):
         # Filtering the WorkspaceUserLink with the given url to check if the link already exists.

@@ -157,7 +169,7 @@ class WorkspaceUserLinkSerializer(BaseSerializer):
         workspace_user_link = WorkspaceUserLink.objects.filter(
             url=url,
             workspace_id=validated_data.get("workspace_id"),
-            owner_id=validated_data.get("owner_id")
+            owner_id=validated_data.get("owner_id"),
         )

         if workspace_user_link.exists():
@@ -173,10 +185,8 @@ class WorkspaceUserLinkSerializer(BaseSerializer):
         url = validated_data.get("url")

         workspace_user_link = WorkspaceUserLink.objects.filter(
-            url=url,
-            workspace_id=instance.workspace_id,
-            owner=instance.owner
-        )
+            url=url, workspace_id=instance.workspace_id, owner=instance.owner
+        )

         if workspace_user_link.exclude(pk=instance.id).exists():
             raise serializers.ValidationError(
@@ -185,6 +195,7 @@ class WorkspaceUserLinkSerializer(BaseSerializer):

         return super().update(instance, validated_data)
+

 class IssueRecentVisitSerializer(serializers.ModelSerializer):
     project_identifier = serializers.SerializerMethodField()
```
```diff
@@ -6,8 +6,14 @@ from plane.app.views import (
     AnalyticViewViewset,
     SavedAnalyticEndpoint,
     ExportAnalyticsEndpoint,
+    AdvanceAnalyticsEndpoint,
+    AdvanceAnalyticsStatsEndpoint,
+    AdvanceAnalyticsChartEndpoint,
     DefaultAnalyticsEndpoint,
     ProjectStatsEndpoint,
+    ProjectAdvanceAnalyticsEndpoint,
+    ProjectAdvanceAnalyticsStatsEndpoint,
+    ProjectAdvanceAnalyticsChartEndpoint,
 )


@@ -49,4 +55,34 @@ urlpatterns = [
         ProjectStatsEndpoint.as_view(),
         name="project-analytics",
     ),
+    path(
+        "workspaces/<str:slug>/advance-analytics/",
+        AdvanceAnalyticsEndpoint.as_view(),
+        name="advance-analytics",
+    ),
+    path(
+        "workspaces/<str:slug>/advance-analytics-stats/",
+        AdvanceAnalyticsStatsEndpoint.as_view(),
+        name="advance-analytics-stats",
+    ),
+    path(
+        "workspaces/<str:slug>/advance-analytics-charts/",
+        AdvanceAnalyticsChartEndpoint.as_view(),
+        name="advance-analytics-chart",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/advance-analytics/",
+        ProjectAdvanceAnalyticsEndpoint.as_view(),
+        name="project-advance-analytics",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/advance-analytics-stats/",
+        ProjectAdvanceAnalyticsStatsEndpoint.as_view(),
+        name="project-advance-analytics-stats",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/advance-analytics-charts/",
+        ProjectAdvanceAnalyticsChartEndpoint.as_view(),
+        name="project-advance-analytics-chart",
+    ),
 ]
```
```diff
@@ -12,6 +12,7 @@ from plane.app.views import (
     AssetRestoreEndpoint,
     ProjectAssetEndpoint,
     ProjectBulkAssetEndpoint,
+    AssetCheckEndpoint,
 )


@@ -81,5 +82,11 @@ urlpatterns = [
     path(
         "assets/v2/workspaces/<str:slug>/projects/<uuid:project_id>/<uuid:entity_id>/bulk/",
         ProjectBulkAssetEndpoint.as_view(),
         name="bulk-asset-update",
     ),
+    path(
+        "assets/v2/workspaces/<str:slug>/check/<uuid:asset_id>/",
+        AssetCheckEndpoint.as_view(),
+        name="asset-check",
+    ),
 ]
```
```diff
@@ -106,6 +106,7 @@ from .asset.v2 import (
     AssetRestoreEndpoint,
     ProjectAssetEndpoint,
     ProjectBulkAssetEndpoint,
+    AssetCheckEndpoint,
 )
 from .issue.base import (
     IssueListEndpoint,
@@ -199,6 +200,18 @@ from .analytic.base import (
     ProjectStatsEndpoint,
 )

+from .analytic.advance import (
+    AdvanceAnalyticsEndpoint,
+    AdvanceAnalyticsStatsEndpoint,
+    AdvanceAnalyticsChartEndpoint,
+)
+
+from .analytic.project_analytics import (
+    ProjectAdvanceAnalyticsEndpoint,
+    ProjectAdvanceAnalyticsStatsEndpoint,
+    ProjectAdvanceAnalyticsChartEndpoint,
+)
+
 from .notification.base import (
     NotificationViewSet,
     UnreadNotificationEndpoint,
```
366
apiserver/plane/app/views/analytic/advance.py
Normal file
366
apiserver/plane/app/views/analytic/advance.py
Normal file
@@ -0,0 +1,366 @@
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
from typing import Dict, List, Any
|
||||
from django.db.models import QuerySet, Q, Count
|
||||
from django.http import HttpRequest
|
||||
from django.db.models.functions import TruncMonth
|
||||
from django.utils import timezone
|
||||
from plane.app.views.base import BaseAPIView
|
||||
from plane.app.permissions import ROLE, allow_permission
|
||||
from plane.db.models import (
|
||||
WorkspaceMember,
|
||||
Project,
|
||||
Issue,
|
||||
Cycle,
|
||||
Module,
|
||||
IssueView,
|
||||
ProjectPage,
|
||||
Workspace,
|
||||
CycleIssue,
|
||||
ModuleIssue,
|
||||
ProjectMember,
|
||||
)
|
||||
from plane.utils.build_chart import build_analytics_chart
|
||||
from plane.utils.date_utils import (
|
||||
get_analytics_filters,
|
||||
)
|
||||
|
||||
|
||||
class AdvanceAnalyticsBaseView(BaseAPIView):
|
||||
def initialize_workspace(self, slug: str, type: str) -> None:
|
||||
self._workspace_slug = slug
|
||||
self.filters = get_analytics_filters(
|
||||
slug=slug,
|
||||
type=type,
|
||||
user=self.request.user,
|
||||
date_filter=self.request.GET.get("date_filter", None),
|
||||
project_ids=self.request.GET.get("project_ids", None),
|
||||
)
|
||||
|
||||
|
||||
class AdvanceAnalyticsEndpoint(AdvanceAnalyticsBaseView):
|
||||
def get_filtered_counts(self, queryset: QuerySet) -> Dict[str, int]:
|
||||
def get_filtered_count() -> int:
|
||||
if self.filters["analytics_date_range"]:
|
||||
return queryset.filter(
|
||||
created_at__gte=self.filters["analytics_date_range"]["current"][
|
||||
"gte"
|
||||
],
|
||||
created_at__lte=self.filters["analytics_date_range"]["current"][
|
||||
"lte"
|
||||
],
|
||||
).count()
|
||||
return queryset.count()
|
||||
|
||||
def get_previous_count() -> int:
|
||||
if self.filters["analytics_date_range"] and self.filters[
|
||||
"analytics_date_range"
|
||||
].get("previous"):
|
||||
return queryset.filter(
|
||||
created_at__gte=self.filters["analytics_date_range"]["previous"][
|
||||
"gte"
|
||||
],
|
||||
created_at__lte=self.filters["analytics_date_range"]["previous"][
|
||||
"lte"
|
||||
],
|
||||
).count()
|
||||
return 0
|
||||
|
||||
return {
|
||||
"count": get_filtered_count(),
|
||||
# "filter_count": get_previous_count(),
|
||||
}
|
||||
|
||||
def get_overview_data(self) -> Dict[str, Dict[str, int]]:
|
||||
members_query = WorkspaceMember.objects.filter(
|
||||
workspace__slug=self._workspace_slug, is_active=True
|
||||
)
|
||||
|
||||
if self.request.GET.get("project_ids", None):
|
||||
project_ids = self.request.GET.get("project_ids", None)
|
||||
project_ids = [str(project_id) for project_id in project_ids.split(",")]
|
||||
members_query = ProjectMember.objects.filter(
|
||||
project_id__in=project_ids, is_active=True
|
||||
)
|
||||
|
||||
return {
|
||||
"total_users": self.get_filtered_counts(members_query),
|
||||
"total_admins": self.get_filtered_counts(
|
||||
members_query.filter(role=ROLE.ADMIN.value)
|
||||
),
|
||||
"total_members": self.get_filtered_counts(
|
||||
members_query.filter(role=ROLE.MEMBER.value)
|
||||
),
|
||||
"total_guests": self.get_filtered_counts(
|
||||
members_query.filter(role=ROLE.GUEST.value)
|
||||
),
|
||||
"total_projects": self.get_filtered_counts(
|
||||
Project.objects.filter(**self.filters["project_filters"])
|
||||
),
|
||||
"total_work_items": self.get_filtered_counts(
|
||||
Issue.issue_objects.filter(**self.filters["base_filters"])
|
||||
),
|
||||
"total_cycles": self.get_filtered_counts(
|
||||
Cycle.objects.filter(**self.filters["base_filters"])
|
||||
),
|
||||
"total_intake": self.get_filtered_counts(
|
||||
Issue.objects.filter(**self.filters["base_filters"]).filter(
|
||||
issue_intake__status__in=["-2", "0"]
|
||||
)
|
||||
),
|
||||
}
|
||||
|
||||
def get_work_items_stats(self) -> Dict[str, Dict[str, int]]:
|
||||
base_queryset = Issue.issue_objects.filter(**self.filters["base_filters"])
|
||||
|
||||
return {
|
||||
"total_work_items": self.get_filtered_counts(base_queryset),
|
||||
"started_work_items": self.get_filtered_counts(
|
||||
base_queryset.filter(state__group="started")
|
||||
),
|
||||
"backlog_work_items": self.get_filtered_counts(
|
||||
base_queryset.filter(state__group="backlog")
|
||||
),
|
||||
"un_started_work_items": self.get_filtered_counts(
|
||||
base_queryset.filter(state__group="unstarted")
|
||||
),
|
||||
"completed_work_items": self.get_filtered_counts(
|
||||
base_queryset.filter(state__group="completed")
|
||||
),
|
||||
}
|
||||
|
||||
@allow_permission([ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE")
|
||||
def get(self, request: HttpRequest, slug: str) -> Response:
|
||||
self.initialize_workspace(slug, type="analytics")
|
||||
tab = request.GET.get("tab", "overview")
|
||||
|
||||
if tab == "overview":
|
||||
return Response(
|
||||
self.get_overview_data(),
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
elif tab == "work-items":
|
||||
return Response(
|
||||
self.get_work_items_stats(),
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
return Response({"message": "Invalid tab"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
|
||||
class AdvanceAnalyticsStatsEndpoint(AdvanceAnalyticsBaseView):
    def get_project_issues_stats(self) -> QuerySet:
        # Get the base queryset with workspace and project filters
        base_queryset = Issue.issue_objects.filter(**self.filters["base_filters"])

        # Apply date range filter if available
        if self.filters["chart_period_range"]:
            start_date, end_date = self.filters["chart_period_range"]
            base_queryset = base_queryset.filter(
                created_at__date__gte=start_date, created_at__date__lte=end_date
            )

        return (
            base_queryset.values("project_id", "project__name")
            .annotate(
                cancelled_work_items=Count("id", filter=Q(state__group="cancelled")),
                completed_work_items=Count("id", filter=Q(state__group="completed")),
                backlog_work_items=Count("id", filter=Q(state__group="backlog")),
                un_started_work_items=Count("id", filter=Q(state__group="unstarted")),
                started_work_items=Count("id", filter=Q(state__group="started")),
            )
            .order_by("project_id")
        )

    def get_work_items_stats(self) -> QuerySet:
        base_queryset = Issue.issue_objects.filter(**self.filters["base_filters"])
        return (
            base_queryset.values("project_id", "project__name")
            .annotate(
                cancelled_work_items=Count("id", filter=Q(state__group="cancelled")),
                completed_work_items=Count("id", filter=Q(state__group="completed")),
                backlog_work_items=Count("id", filter=Q(state__group="backlog")),
                un_started_work_items=Count("id", filter=Q(state__group="unstarted")),
                started_work_items=Count("id", filter=Q(state__group="started")),
            )
            .order_by("project_id")
        )

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE")
    def get(self, request: HttpRequest, slug: str) -> Response:
        self.initialize_workspace(slug, type="chart")
        type = request.GET.get("type", "work-items")

        if type == "work-items":
            return Response(
                self.get_work_items_stats(),
                status=status.HTTP_200_OK,
            )

        return Response({"message": "Invalid type"}, status=status.HTTP_400_BAD_REQUEST)

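The per-project stats above lean on Django's conditional aggregation; the same pattern in isolation, as a sketch:

    from django.db.models import Count, Q

    # One row per project; each count is computed in SQL via
    # COUNT(id) FILTER (WHERE state.group = '<group>') rather than in Python.
    rows = (
        Issue.issue_objects.values("project_id", "project__name")
        .annotate(completed=Count("id", filter=Q(state__group="completed")))
        .order_by("project_id")
    )
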
class AdvanceAnalyticsChartEndpoint(AdvanceAnalyticsBaseView):
    def project_chart(self) -> List[Dict[str, Any]]:
        # Get the base queryset with workspace and project filters
        base_queryset = Issue.issue_objects.filter(**self.filters["base_filters"])
        date_filter = {}

        # Apply date range filter if available
        if self.filters["chart_period_range"]:
            start_date, end_date = self.filters["chart_period_range"]
            date_filter = {
                "created_at__date__gte": start_date,
                "created_at__date__lte": end_date,
            }

        total_work_items = base_queryset.filter(**date_filter).count()
        total_cycles = Cycle.objects.filter(
            **self.filters["base_filters"], **date_filter
        ).count()
        total_modules = Module.objects.filter(
            **self.filters["base_filters"], **date_filter
        ).count()
        total_intake = Issue.objects.filter(
            issue_intake__isnull=False, **self.filters["base_filters"], **date_filter
        ).count()
        total_members = WorkspaceMember.objects.filter(
            workspace__slug=self._workspace_slug, is_active=True, **date_filter
        ).count()
        total_pages = ProjectPage.objects.filter(
            **self.filters["base_filters"], **date_filter
        ).count()
        total_views = IssueView.objects.filter(
            **self.filters["base_filters"], **date_filter
        ).count()

        data = {
            "work_items": total_work_items,
            "cycles": total_cycles,
            "modules": total_modules,
            "intake": total_intake,
            "members": total_members,
            "pages": total_pages,
            "views": total_views,
        }

        return [
            {
                "key": key,
                "name": key.replace("_", " ").title(),
                "count": value or 0,
            }
            for key, value in data.items()
        ]

    def work_item_completion_chart(self) -> Dict[str, Any]:
        # Get the base queryset
        queryset = (
            Issue.issue_objects.filter(**self.filters["base_filters"])
            .select_related("workspace", "state", "parent")
            .prefetch_related(
                "assignees", "labels", "issue_module__module", "issue_cycle__cycle"
            )
        )

        workspace = Workspace.objects.get(slug=self._workspace_slug)
        start_date = workspace.created_at.date().replace(day=1)

        # Apply date range filter if available
        if self.filters["chart_period_range"]:
            start_date, end_date = self.filters["chart_period_range"]
            queryset = queryset.filter(
                created_at__date__gte=start_date, created_at__date__lte=end_date
            )

        # Annotate by month and count
        monthly_stats = (
            queryset.annotate(month=TruncMonth("created_at"))
            .values("month")
            .annotate(
                created_count=Count("id"),
                completed_count=Count("id", filter=Q(state__group="completed")),
            )
            .order_by("month")
        )

        # Create dictionary of month -> counts
        stats_dict = {
            stat["month"].strftime("%Y-%m-%d"): {
                "created_count": stat["created_count"],
                "completed_count": stat["completed_count"],
            }
            for stat in monthly_stats
        }

        # Generate monthly data (ensure months with 0 count are included)
        data = []
        # include the current date at the end
        end_date = timezone.now().date()
        last_month = end_date.replace(day=1)
        current_month = start_date

        while current_month <= last_month:
            date_str = current_month.strftime("%Y-%m-%d")
            stats = stats_dict.get(date_str, {"created_count": 0, "completed_count": 0})
            data.append(
                {
                    "key": date_str,
                    "name": date_str,
                    "count": stats["created_count"],
                    "completed_issues": stats["completed_count"],
                    "created_issues": stats["created_count"],
                }
            )
            # Move to next month
            if current_month.month == 12:
                current_month = current_month.replace(
                    year=current_month.year + 1, month=1
                )
            else:
                current_month = current_month.replace(month=current_month.month + 1)

        schema = {
            "completed_issues": "completed_issues",
            "created_issues": "created_issues",
        }

        return {"data": data, "schema": schema}

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE")
    def get(self, request: HttpRequest, slug: str) -> Response:
        self.initialize_workspace(slug, type="chart")
        type = request.GET.get("type", "projects")
        group_by = request.GET.get("group_by", None)
        x_axis = request.GET.get("x_axis", "PRIORITY")

        if type == "projects":
            return Response(self.project_chart(), status=status.HTTP_200_OK)

        elif type == "custom-work-items":
            queryset = (
                Issue.issue_objects.filter(**self.filters["base_filters"])
                .select_related("workspace", "state", "parent")
                .prefetch_related(
                    "assignees", "labels", "issue_module__module", "issue_cycle__cycle"
                )
            )

            # Apply date range filter if available
            if self.filters["chart_period_range"]:
                start_date, end_date = self.filters["chart_period_range"]
                queryset = queryset.filter(
                    created_at__date__gte=start_date, created_at__date__lte=end_date
                )

            return Response(
                build_analytics_chart(queryset, x_axis, group_by),
                status=status.HTTP_200_OK,
            )

        elif type == "work-items":
            return Response(
                self.work_item_completion_chart(),
                status=status.HTTP_200_OK,
            )

        return Response({"message": "Invalid type"}, status=status.HTTP_400_BAD_REQUEST)
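The monthly loop above steps through first-of-month dates with date.replace; extracted as a small helper sketch, assuming inputs are already normalized to day 1 (as they are in the loop):

    from datetime import date

    def next_month(first_of_month: date) -> date:
        # The day stays pinned to 1, so replace() can never overflow
        # into a shorter month.
        if first_of_month.month == 12:
            return first_of_month.replace(year=first_of_month.year + 1, month=1)
        return first_of_month.replace(month=first_of_month.month + 1)

    assert next_month(date(2024, 12, 1)) == date(2025, 1, 1)
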
421
apiserver/plane/app/views/analytic/project_analytics.py
Normal file
@@ -0,0 +1,421 @@
from rest_framework.response import Response
from rest_framework import status
from typing import Dict, Any
from django.db.models import QuerySet, Q, Count
from django.http import HttpRequest
from django.db.models.functions import TruncMonth
from django.utils import timezone
from datetime import timedelta
from plane.app.views.base import BaseAPIView
from plane.app.permissions import ROLE, allow_permission
from plane.db.models import (
    Project,
    Issue,
    Cycle,
    Module,
    CycleIssue,
    ModuleIssue,
)
from django.db import models
from django.db.models import F, Case, When, Value
from django.db.models.functions import Concat
from plane.utils.build_chart import build_analytics_chart
from plane.utils.date_utils import (
    get_analytics_filters,
)

class ProjectAdvanceAnalyticsBaseView(BaseAPIView):
    def initialize_workspace(self, slug: str, type: str) -> None:
        self._workspace_slug = slug
        self.filters = get_analytics_filters(
            slug=slug,
            type=type,
            user=self.request.user,
            date_filter=self.request.GET.get("date_filter", None),
            project_ids=self.request.GET.get("project_ids", None),
        )

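The classes below index into self.filters by key. get_analytics_filters itself is not shown in this diff, but from the call sites its result plausibly has the shape sketched here; the concrete values are assumptions, only the key names come from this file:

    # Hypothetical shape, inferred from how self.filters is consumed below.
    filters = {
        "base_filters": {"workspace__slug": "my-workspace"},  # ORM filter kwargs
        "analytics_date_range": {"current": {"gte": "...", "lte": "..."}},  # or None
        "chart_period_range": ("2024-01-01", "2024-06-30"),  # or None
    }
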
class ProjectAdvanceAnalyticsEndpoint(ProjectAdvanceAnalyticsBaseView):
    def get_filtered_counts(self, queryset: QuerySet) -> Dict[str, int]:
        def get_filtered_count() -> int:
            if self.filters["analytics_date_range"]:
                return queryset.filter(
                    created_at__gte=self.filters["analytics_date_range"]["current"][
                        "gte"
                    ],
                    created_at__lte=self.filters["analytics_date_range"]["current"][
                        "lte"
                    ],
                ).count()
            return queryset.count()

        return {
            "count": get_filtered_count(),
        }

    def get_work_items_stats(
        self, project_id, cycle_id=None, module_id=None
    ) -> Dict[str, Dict[str, int]]:
        """
        Returns work item stats for the project, or filtered by cycle_id or module_id if provided.
        """
        base_queryset = None
        if cycle_id is not None:
            cycle_issues = CycleIssue.objects.filter(
                **self.filters["base_filters"], cycle_id=cycle_id
            ).values_list("issue_id", flat=True)
            base_queryset = Issue.issue_objects.filter(id__in=cycle_issues)
        elif module_id is not None:
            module_issues = ModuleIssue.objects.filter(
                **self.filters["base_filters"], module_id=module_id
            ).values_list("issue_id", flat=True)
            base_queryset = Issue.issue_objects.filter(id__in=module_issues)
        else:
            base_queryset = Issue.issue_objects.filter(
                **self.filters["base_filters"], project_id=project_id
            )

        return {
            "total_work_items": self.get_filtered_counts(base_queryset),
            "started_work_items": self.get_filtered_counts(
                base_queryset.filter(state__group="started")
            ),
            "backlog_work_items": self.get_filtered_counts(
                base_queryset.filter(state__group="backlog")
            ),
            "un_started_work_items": self.get_filtered_counts(
                base_queryset.filter(state__group="unstarted")
            ),
            "completed_work_items": self.get_filtered_counts(
                base_queryset.filter(state__group="completed")
            ),
        }

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER])
    def get(self, request: HttpRequest, slug: str, project_id: str) -> Response:
        self.initialize_workspace(slug, type="analytics")

        # Optionally accept cycle_id or module_id as query params
        cycle_id = request.GET.get("cycle_id", None)
        module_id = request.GET.get("module_id", None)
        return Response(
            self.get_work_items_stats(
                cycle_id=cycle_id, module_id=module_id, project_id=project_id
            ),
            status=status.HTTP_200_OK,
        )

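A usage sketch for the endpoint above, assuming a project-scoped route such as /api/workspaces/<slug>/projects/<project_id>/advance-analytics/ (the URL configuration is not in this diff); cycle_id or module_id narrows the counts exactly as in get_work_items_stats:

    # Hypothetical client call; the path is an assumption.
    import requests

    resp = requests.get(
        "https://plane.example.com/api/workspaces/my-workspace/projects/"
        "<project-uuid>/advance-analytics/",
        params={"cycle_id": "<cycle-uuid>"},  # or module_id=<module-uuid>
        headers={"Authorization": "Bearer <api-token>"},
    )
    print(resp.json()["completed_work_items"])  # {"count": <n>}
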
class ProjectAdvanceAnalyticsStatsEndpoint(ProjectAdvanceAnalyticsBaseView):
    def get_project_issues_stats(self) -> QuerySet:
        # Get the base queryset with workspace and project filters
        base_queryset = Issue.issue_objects.filter(**self.filters["base_filters"])

        # Apply date range filter if available
        if self.filters["chart_period_range"]:
            start_date, end_date = self.filters["chart_period_range"]
            base_queryset = base_queryset.filter(
                created_at__date__gte=start_date, created_at__date__lte=end_date
            )

        return (
            base_queryset.values("project_id", "project__name")
            .annotate(
                cancelled_work_items=Count("id", filter=Q(state__group="cancelled")),
                completed_work_items=Count("id", filter=Q(state__group="completed")),
                backlog_work_items=Count("id", filter=Q(state__group="backlog")),
                un_started_work_items=Count("id", filter=Q(state__group="unstarted")),
                started_work_items=Count("id", filter=Q(state__group="started")),
            )
            .order_by("project_id")
        )

    def get_work_items_stats(
        self, project_id, cycle_id=None, module_id=None
    ) -> QuerySet:
        base_queryset = None
        if cycle_id is not None:
            cycle_issues = CycleIssue.objects.filter(
                **self.filters["base_filters"], cycle_id=cycle_id
            ).values_list("issue_id", flat=True)
            base_queryset = Issue.issue_objects.filter(id__in=cycle_issues)
        elif module_id is not None:
            module_issues = ModuleIssue.objects.filter(
                **self.filters["base_filters"], module_id=module_id
            ).values_list("issue_id", flat=True)
            base_queryset = Issue.issue_objects.filter(id__in=module_issues)
        else:
            base_queryset = Issue.issue_objects.filter(
                **self.filters["base_filters"], project_id=project_id
            )
        return (
            base_queryset.annotate(display_name=F("assignees__display_name"))
            .annotate(assignee_id=F("assignees__id"))
            .annotate(avatar=F("assignees__avatar"))
            .annotate(
                avatar_url=Case(
                    # If `avatar_asset` exists, use it to generate the asset URL
                    When(
                        assignees__avatar_asset__isnull=False,
                        then=Concat(
                            Value("/api/assets/v2/static/"),
                            "assignees__avatar_asset",  # Assuming avatar_asset has an id or relevant field
                            Value("/"),
                        ),
                    ),
                    # If `avatar_asset` is None, fall back to using `avatar` field directly
                    When(
                        assignees__avatar_asset__isnull=True, then="assignees__avatar"
                    ),
                    default=Value(None),
                    output_field=models.CharField(),
                )
            )
            .values("display_name", "assignee_id", "avatar_url")
            .annotate(
                cancelled_work_items=Count(
                    "id", filter=Q(state__group="cancelled"), distinct=True
                ),
                completed_work_items=Count(
                    "id", filter=Q(state__group="completed"), distinct=True
                ),
                backlog_work_items=Count(
                    "id", filter=Q(state__group="backlog"), distinct=True
                ),
                un_started_work_items=Count(
                    "id", filter=Q(state__group="unstarted"), distinct=True
                ),
                started_work_items=Count(
                    "id", filter=Q(state__group="started"), distinct=True
                ),
            )
            .order_by("display_name")
        )

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER])
    def get(self, request: HttpRequest, slug: str, project_id: str) -> Response:
        self.initialize_workspace(slug, type="chart")
        type = request.GET.get("type", "work-items")

        if type == "work-items":
            # Optionally accept cycle_id or module_id as query params
            cycle_id = request.GET.get("cycle_id", None)
            module_id = request.GET.get("module_id", None)
            return Response(
                self.get_work_items_stats(
                    project_id=project_id, cycle_id=cycle_id, module_id=module_id
                ),
                status=status.HTTP_200_OK,
            )

        return Response({"message": "Invalid type"}, status=status.HTTP_400_BAD_REQUEST)

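The avatar_url annotation above resolves a display URL entirely in SQL; the same Case/When/Concat idiom in isolation (the model and field names here are illustrative, not from this diff):

    from django.db import models
    from django.db.models import Case, Value, When
    from django.db.models.functions import Concat

    profiles = Profile.objects.annotate(
        avatar_url=Case(
            # Prefer the uploaded asset when one exists...
            When(
                avatar_asset__isnull=False,
                then=Concat(Value("/api/assets/v2/static/"), "avatar_asset", Value("/")),
            ),
            # ...otherwise fall back to the legacy avatar text field.
            When(avatar_asset__isnull=True, then="avatar"),
            default=Value(None),
            output_field=models.CharField(),
        )
    )
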
class ProjectAdvanceAnalyticsChartEndpoint(ProjectAdvanceAnalyticsBaseView):
    def work_item_completion_chart(
        self, project_id, cycle_id=None, module_id=None
    ) -> Dict[str, Any]:
        # Get the base queryset
        queryset = (
            Issue.issue_objects.filter(**self.filters["base_filters"])
            .filter(project_id=project_id)
            .select_related("workspace", "state", "parent")
            .prefetch_related(
                "assignees", "labels", "issue_module__module", "issue_cycle__cycle"
            )
        )

        if cycle_id is not None:
            cycle_issues = CycleIssue.objects.filter(
                **self.filters["base_filters"], cycle_id=cycle_id
            ).values_list("issue_id", flat=True)
            cycle = Cycle.objects.filter(id=cycle_id).first()
            if cycle and cycle.start_date:
                start_date = cycle.start_date.date()
                end_date = cycle.end_date.date()
            else:
                return {"data": [], "schema": {}}
            queryset = cycle_issues

        elif module_id is not None:
            module_issues = ModuleIssue.objects.filter(
                **self.filters["base_filters"], module_id=module_id
            ).values_list("issue_id", flat=True)
            module = Module.objects.filter(id=module_id).first()
            if module and module.start_date:
                start_date = module.start_date
                end_date = module.target_date
            else:
                return {"data": [], "schema": {}}
            queryset = module_issues

        else:
            project = Project.objects.filter(id=project_id).first()
            if project and project.created_at:
                start_date = project.created_at.date().replace(day=1)
            else:
                return {"data": [], "schema": {}}

        if cycle_id or module_id:
            # Get daily stats with optimized query
            daily_stats = (
                queryset.values("created_at__date")
                .annotate(
                    created_count=Count("id"),
                    completed_count=Count(
                        "id", filter=Q(issue__state__group="completed")
                    ),
                )
                .order_by("created_at__date")
            )

            # Create a dictionary of existing stats with summed counts
            stats_dict = {
                stat["created_at__date"].strftime("%Y-%m-%d"): {
                    "created_count": stat["created_count"],
                    "completed_count": stat["completed_count"],
                }
                for stat in daily_stats
            }

            # Generate data for all days in the range
            data = []
            current_date = start_date
            while current_date <= end_date:
                date_str = current_date.strftime("%Y-%m-%d")
                stats = stats_dict.get(
                    date_str, {"created_count": 0, "completed_count": 0}
                )
                data.append(
                    {
                        "key": date_str,
                        "name": date_str,
                        "count": stats["created_count"] + stats["completed_count"],
                        "completed_issues": stats["completed_count"],
                        "created_issues": stats["created_count"],
                    }
                )
                current_date += timedelta(days=1)
        else:
            # Apply date range filter if available
            if self.filters["chart_period_range"]:
                start_date, end_date = self.filters["chart_period_range"]
                queryset = queryset.filter(
                    created_at__date__gte=start_date, created_at__date__lte=end_date
                )

            # Annotate by month and count
            monthly_stats = (
                queryset.annotate(month=TruncMonth("created_at"))
                .values("month")
                .annotate(
                    created_count=Count("id"),
                    completed_count=Count("id", filter=Q(state__group="completed")),
                )
                .order_by("month")
            )

            # Create dictionary of month -> counts
            stats_dict = {
                stat["month"].strftime("%Y-%m-%d"): {
                    "created_count": stat["created_count"],
                    "completed_count": stat["completed_count"],
                }
                for stat in monthly_stats
            }

            # Generate monthly data (ensure months with 0 count are included)
            data = []
            # include the current date at the end
            end_date = timezone.now().date()
            last_month = end_date.replace(day=1)
            current_month = start_date

            while current_month <= last_month:
                date_str = current_month.strftime("%Y-%m-%d")
                stats = stats_dict.get(
                    date_str, {"created_count": 0, "completed_count": 0}
                )
                data.append(
                    {
                        "key": date_str,
                        "name": date_str,
                        "count": stats["created_count"],
                        "completed_issues": stats["completed_count"],
                        "created_issues": stats["created_count"],
                    }
                )
                # Move to next month
                if current_month.month == 12:
                    current_month = current_month.replace(
                        year=current_month.year + 1, month=1
                    )
                else:
                    current_month = current_month.replace(month=current_month.month + 1)

        schema = {
            "completed_issues": "completed_issues",
            "created_issues": "created_issues",
        }

        return {"data": data, "schema": schema}

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
    def get(self, request: HttpRequest, slug: str, project_id: str) -> Response:
        self.initialize_workspace(slug, type="chart")
        type = request.GET.get("type", "projects")
        group_by = request.GET.get("group_by", None)
        x_axis = request.GET.get("x_axis", "PRIORITY")
        cycle_id = request.GET.get("cycle_id", None)
        module_id = request.GET.get("module_id", None)

        if type == "custom-work-items":
            queryset = (
                Issue.issue_objects.filter(**self.filters["base_filters"])
                .filter(project_id=project_id)
                .select_related("workspace", "state", "parent")
                .prefetch_related(
                    "assignees", "labels", "issue_module__module", "issue_cycle__cycle"
                )
            )

            # Apply cycle/module filters if present
            if cycle_id is not None:
                cycle_issues = CycleIssue.objects.filter(
                    **self.filters["base_filters"], cycle_id=cycle_id
                ).values_list("issue_id", flat=True)
                queryset = queryset.filter(id__in=cycle_issues)

            elif module_id is not None:
                module_issues = ModuleIssue.objects.filter(
                    **self.filters["base_filters"], module_id=module_id
                ).values_list("issue_id", flat=True)
                queryset = queryset.filter(id__in=module_issues)

            # Apply date range filter if available
            if self.filters["chart_period_range"]:
                start_date, end_date = self.filters["chart_period_range"]
                queryset = queryset.filter(
                    created_at__date__gte=start_date, created_at__date__lte=end_date
                )

            return Response(
                build_analytics_chart(queryset, x_axis, group_by),
                status=status.HTTP_200_OK,
            )

        elif type == "work-items":
            # Optionally accept cycle_id or module_id as query params
            cycle_id = request.GET.get("cycle_id", None)
            module_id = request.GET.get("module_id", None)

            return Response(
                self.work_item_completion_chart(
                    project_id=project_id, cycle_id=cycle_id, module_id=module_id
                ),
                status=status.HTTP_200_OK,
            )

        return Response({"message": "Invalid type"}, status=status.HTTP_400_BAD_REQUEST)
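Both branches of work_item_completion_chart return the same envelope; a sketch of the payload a client can expect (the numbers are invented, the keys mirror the code above):

    {
        "data": [
            {
                "key": "2024-05-01",
                "name": "2024-05-01",
                "count": 7,                 # daily view sums created + completed
                "completed_issues": 3,
                "created_issues": 4,
            },
        ],
        "schema": {
            "completed_issues": "completed_issues",
            "created_issues": "created_issues",
        },
    }
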
@@ -9,11 +9,11 @@ from rest_framework import status
 from .base import BaseAPIView
 from plane.db.models import APIToken, Workspace
 from plane.app.serializers import APITokenSerializer, APITokenReadSerializer
-from plane.app.permissions import WorkspaceOwnerPermission
+from plane.app.permissions import WorkspaceEntityPermission


 class ApiTokenEndpoint(BaseAPIView):
-    permission_classes = [WorkspaceOwnerPermission]
+    permission_classes = [WorkspaceEntityPermission]

     def post(self, request, slug):
         label = request.data.get("label", str(uuid4().hex))
@@ -68,7 +68,7 @@ class ApiTokenEndpoint(BaseAPIView):


 class ServiceApiTokenEndpoint(BaseAPIView):
-    permission_classes = [WorkspaceOwnerPermission]
+    permission_classes = [WorkspaceEntityPermission]

     def post(self, request, slug):
         workspace = Workspace.objects.get(slug=slug)

@@ -137,7 +137,7 @@ class UserAssetsV2Endpoint(BaseAPIView):
         if type not in allowed_types:
             return Response(
                 {
-                    "error": "Invalid file type. Only JPEG and PNG files are allowed.",
+                    "error": "Invalid file type. Only JPEG, PNG, WebP, JPG and GIF files are allowed.",
                     "status": False,
                 },
                 status=status.HTTP_400_BAD_REQUEST,
@@ -351,7 +351,7 @@ class WorkspaceFileAssetEndpoint(BaseAPIView):
         if type not in allowed_types:
             return Response(
                 {
-                    "error": "Invalid file type. Only JPEG and PNG files are allowed.",
+                    "error": "Invalid file type. Only JPEG, PNG, WebP, JPG and GIF files are allowed.",
                     "status": False,
                 },
                 status=status.HTTP_400_BAD_REQUEST,
@@ -552,7 +552,7 @@ class ProjectAssetEndpoint(BaseAPIView):
         if type not in allowed_types:
             return Response(
                 {
-                    "error": "Invalid file type. Only JPEG and PNG files are allowed.",
+                    "error": "Invalid file type. Only JPEG, PNG, WebP, JPG and GIF files are allowed.",
                     "status": False,
                 },
                 status=status.HTTP_400_BAD_REQUEST,
@@ -683,7 +683,7 @@ class ProjectBulkAssetEndpoint(BaseAPIView):
         # For some cases, the bulk api is called after the issue is deleted creating
         # an integrity error
         try:
-            assets.update(issue_id=entity_id)
+            assets.update(issue_id=entity_id, project_id=project_id)
         except IntegrityError:
             pass

@@ -707,3 +707,14 @@
                 pass

         return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class AssetCheckEndpoint(BaseAPIView):
+    """Endpoint to check if an asset exists."""
+
+    @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST], level="WORKSPACE")
+    def get(self, request, slug, asset_id):
+        asset = FileAsset.all_objects.filter(
+            id=asset_id, workspace__slug=slug, deleted_at__isnull=True
+        ).exists()
+        return Response({"exists": asset}, status=status.HTTP_200_OK)

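Exercising the new AssetCheckEndpoint, assuming it is mounted at a path like /api/workspaces/<slug>/assets/<asset_id>/check/ (the URL entry is not shown in this diff); only the response shape is taken from the code:

    import requests

    resp = requests.get(
        "https://plane.example.com/api/workspaces/my-workspace/assets/"
        "<asset-uuid>/check/",
        headers={"Authorization": "Bearer <api-token>"},
    )
    assert resp.json() in ({"exists": True}, {"exists": False})
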
@@ -117,6 +117,7 @@ class CycleViewSet(BaseViewSet):
                     issue_cycle__issue__archived_at__isnull=True,
                     issue_cycle__issue__is_draft=False,
                     issue_cycle__deleted_at__isnull=True,
+                    issue_cycle__issue__deleted_at__isnull=True,
                 ),
             )
         )
@@ -129,6 +130,7 @@ class CycleViewSet(BaseViewSet):
                     issue_cycle__issue__archived_at__isnull=True,
                     issue_cycle__issue__is_draft=False,
                     issue_cycle__deleted_at__isnull=True,
+                    issue_cycle__issue__deleted_at__isnull=True,
                 ),
             )
         )
@@ -141,6 +143,7 @@ class CycleViewSet(BaseViewSet):
                     issue_cycle__issue__archived_at__isnull=True,
                     issue_cycle__issue__is_draft=False,
                     issue_cycle__deleted_at__isnull=True,
+                    issue_cycle__issue__deleted_at__isnull=True,
                 ),
             )
         )
@@ -266,9 +269,7 @@ class CycleViewSet(BaseViewSet):
             "created_by",
         )
         datetime_fields = ["start_date", "end_date"]
-        data = user_timezone_converter(
-            data, datetime_fields, project_timezone
-        )
+        data = user_timezone_converter(data, datetime_fields, project_timezone)
         return Response(data, status=status.HTTP_200_OK)

     @allow_permission([ROLE.ADMIN, ROLE.MEMBER])
@@ -415,9 +416,7 @@ class CycleViewSet(BaseViewSet):
         project_timezone = project.timezone

         datetime_fields = ["start_date", "end_date"]
-        cycle = user_timezone_converter(
-            cycle, datetime_fields, project_timezone
-        )
+        cycle = user_timezone_converter(cycle, datetime_fields, project_timezone)

         # Send the model activity
         model_activity.delay(
@@ -574,16 +573,12 @@ class CycleDateCheckEndpoint(BaseAPIView):
                 status=status.HTTP_400_BAD_REQUEST,
             )

-        is_start_date_end_date_equal = (
-            True if str(start_date) == str(end_date) else False
-        )
         start_date = convert_to_utc(
             date=str(start_date), project_id=project_id, is_start_date=True
         )
         end_date = convert_to_utc(
             date=str(end_date),
             project_id=project_id,
-            is_start_date_end_date_equal=is_start_date_end_date_equal,
         )

         # Check if any cycle intersects in the given interval
@@ -668,6 +663,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
                     issue_cycle__issue__archived_at__isnull=True,
                     issue_cycle__issue__is_draft=False,
                     issue_cycle__deleted_at__isnull=True,
+                    issue_cycle__issue__deleted_at__isnull=True,
                 ),
             )
         )
@@ -732,6 +728,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
                 )
             )
         )
+        old_cycle = old_cycle.first()

         estimate_type = Project.objects.filter(
             workspace__slug=slug,
@@ -850,7 +847,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
             )

             estimate_completion_chart = burndown_plot(
-                queryset=old_cycle.first(),
+                queryset=old_cycle,
                 slug=slug,
                 project_id=project_id,
                 plot_type="points",
@@ -997,7 +994,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):

             # Pass the new_cycle queryset to burndown_plot
             completion_chart = burndown_plot(
-                queryset=old_cycle.first(),
+                queryset=old_cycle,
                 slug=slug,
                 project_id=project_id,
                 plot_type="issues",
@@ -1009,12 +1006,12 @@ class TransferCycleIssueEndpoint(BaseAPIView):
         ).first()

         current_cycle.progress_snapshot = {
-            "total_issues": old_cycle.first().total_issues,
-            "completed_issues": old_cycle.first().completed_issues,
-            "cancelled_issues": old_cycle.first().cancelled_issues,
-            "started_issues": old_cycle.first().started_issues,
-            "unstarted_issues": old_cycle.first().unstarted_issues,
-            "backlog_issues": old_cycle.first().backlog_issues,
+            "total_issues": old_cycle.total_issues,
+            "completed_issues": old_cycle.completed_issues,
+            "cancelled_issues": old_cycle.cancelled_issues,
+            "started_issues": old_cycle.started_issues,
+            "unstarted_issues": old_cycle.unstarted_issues,
+            "backlog_issues": old_cycle.backlog_issues,
             "distribution": {
                 "labels": label_distribution_data,
                 "assignees": assignee_distribution_data,
@@ -1122,6 +1119,13 @@ class CycleUserPropertiesEndpoint(BaseAPIView):
 class CycleProgressEndpoint(BaseAPIView):
     @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
     def get(self, request, slug, project_id, cycle_id):
+        cycle = Cycle.objects.filter(
+            workspace__slug=slug, project_id=project_id, id=cycle_id
+        ).first()
+        if not cycle:
+            return Response(
+                {"error": "Cycle not found"}, status=status.HTTP_404_NOT_FOUND
+            )
         aggregate_estimates = (
             Issue.issue_objects.filter(
                 estimate_point__estimate__type="points",
@@ -1172,53 +1176,60 @@
                 ),
             )
         )
+        if cycle.progress_snapshot:
+            backlog_issues = cycle.progress_snapshot.get("backlog_issues", 0)
+            unstarted_issues = cycle.progress_snapshot.get("unstarted_issues", 0)
+            started_issues = cycle.progress_snapshot.get("started_issues", 0)
+            cancelled_issues = cycle.progress_snapshot.get("cancelled_issues", 0)
+            completed_issues = cycle.progress_snapshot.get("completed_issues", 0)
+            total_issues = cycle.progress_snapshot.get("total_issues", 0)
+        else:
-        backlog_issues = Issue.issue_objects.filter(
-            issue_cycle__cycle_id=cycle_id,
-            issue_cycle__deleted_at__isnull=True,
-            workspace__slug=slug,
-            project_id=project_id,
-            state__group="backlog",
-        ).count()
+            backlog_issues = Issue.issue_objects.filter(
+                issue_cycle__cycle_id=cycle_id,
+                issue_cycle__deleted_at__isnull=True,
+                workspace__slug=slug,
+                project_id=project_id,
+                state__group="backlog",
+            ).count()

-        unstarted_issues = Issue.issue_objects.filter(
-            issue_cycle__cycle_id=cycle_id,
-            issue_cycle__deleted_at__isnull=True,
-            workspace__slug=slug,
-            project_id=project_id,
-            state__group="unstarted",
-        ).count()
+            unstarted_issues = Issue.issue_objects.filter(
+                issue_cycle__cycle_id=cycle_id,
+                issue_cycle__deleted_at__isnull=True,
+                workspace__slug=slug,
+                project_id=project_id,
+                state__group="unstarted",
+            ).count()

-        started_issues = Issue.issue_objects.filter(
-            issue_cycle__cycle_id=cycle_id,
-            issue_cycle__deleted_at__isnull=True,
-            workspace__slug=slug,
-            project_id=project_id,
-            state__group="started",
-        ).count()
+            started_issues = Issue.issue_objects.filter(
+                issue_cycle__cycle_id=cycle_id,
+                issue_cycle__deleted_at__isnull=True,
+                workspace__slug=slug,
+                project_id=project_id,
+                state__group="started",
+            ).count()

-        cancelled_issues = Issue.issue_objects.filter(
-            issue_cycle__cycle_id=cycle_id,
-            issue_cycle__deleted_at__isnull=True,
-            workspace__slug=slug,
-            project_id=project_id,
-            state__group="cancelled",
-        ).count()
+            cancelled_issues = Issue.issue_objects.filter(
+                issue_cycle__cycle_id=cycle_id,
+                issue_cycle__deleted_at__isnull=True,
+                workspace__slug=slug,
+                project_id=project_id,
+                state__group="cancelled",
+            ).count()

-        completed_issues = Issue.issue_objects.filter(
-            issue_cycle__cycle_id=cycle_id,
-            issue_cycle__deleted_at__isnull=True,
-            workspace__slug=slug,
-            project_id=project_id,
-            state__group="completed",
-        ).count()
+            completed_issues = Issue.issue_objects.filter(
+                issue_cycle__cycle_id=cycle_id,
+                issue_cycle__deleted_at__isnull=True,
+                workspace__slug=slug,
+                project_id=project_id,
+                state__group="completed",
+            ).count()

-        total_issues = Issue.issue_objects.filter(
-            issue_cycle__cycle_id=cycle_id,
-            issue_cycle__deleted_at__isnull=True,
-            workspace__slug=slug,
-            project_id=project_id,
-        ).count()
+            total_issues = Issue.issue_objects.filter(
+                issue_cycle__cycle_id=cycle_id,
+                issue_cycle__deleted_at__isnull=True,
+                workspace__slug=slug,
+                project_id=project_id,
+            ).count()

         return Response(
             {
@@ -1279,6 +1290,25 @@ class CycleAnalyticsEndpoint(BaseAPIView):
                 status=status.HTTP_400_BAD_REQUEST,
             )

+        # this will tell whether the issues were transferred to the new cycle
+        """
+        if the issues were transferred to the new cycle, then the progress_snapshot
+        will be present; return the progress_snapshot data in the analytics for each date
+
+        else the issues were not transferred to the new cycle, so generate the stats
+        from the cycle issue bridge tables
+        """
+
+        if cycle.progress_snapshot:
+            distribution = cycle.progress_snapshot.get("distribution", {})
+            return Response(
+                {
+                    "labels": distribution.get("labels", []),
+                    "assignees": distribution.get("assignees", []),
+                    "completion_chart": distribution.get("completion_chart", {}),
+                },
+                status=status.HTTP_200_OK,
+            )
+
         estimate_type = Project.objects.filter(
             workspace__slug=slug,
             pk=project_id,
@@ -29,6 +29,7 @@ from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPaginator
 from plane.app.permissions import allow_permission, ROLE
+from plane.utils.host import base_host


 class CycleIssueViewSet(BaseViewSet):
     serializer_class = CycleIssueSerializer
     model = CycleIssue

69
apiserver/plane/app/views/external/base.py
vendored
@@ -11,8 +11,7 @@ from rest_framework.response import Response

 # Module import
 from plane.app.permissions import ROLE, allow_permission
-from plane.app.serializers import (ProjectLiteSerializer,
-                                   WorkspaceLiteSerializer)
+from plane.app.serializers import ProjectLiteSerializer, WorkspaceLiteSerializer
 from plane.db.models import Project, Workspace
 from plane.license.utils.instance_value import get_configuration_value
 from plane.utils.exception_logger import log_exception
@@ -22,6 +21,7 @@ from ..base import BaseAPIView

 class LLMProvider:
     """Base class for LLM provider configurations"""
+
     name: str = ""
     models: List[str] = []
     default_model: str = ""
@@ -34,11 +34,13 @@ class LLMProvider:
             "default_model": cls.default_model,
         }

+
 class OpenAIProvider(LLMProvider):
     name = "OpenAI"
     models = ["gpt-3.5-turbo", "gpt-4o-mini", "gpt-4o", "o1-mini", "o1-preview"]
     default_model = "gpt-4o-mini"

+
 class AnthropicProvider(LLMProvider):
     name = "Anthropic"
     models = [
@@ -49,40 +51,45 @@ class AnthropicProvider(LLMProvider):
         "claude-2.1",
         "claude-2",
         "claude-instant-1.2",
-        "claude-instant-1"
+        "claude-instant-1",
     ]
     default_model = "claude-3-sonnet-20240229"

+
 class GeminiProvider(LLMProvider):
     name = "Gemini"
     models = ["gemini-pro", "gemini-1.5-pro-latest", "gemini-pro-vision"]
     default_model = "gemini-pro"

+
 SUPPORTED_PROVIDERS = {
     "openai": OpenAIProvider,
     "anthropic": AnthropicProvider,
     "gemini": GeminiProvider,
 }


 def get_llm_config() -> Tuple[str | None, str | None, str | None]:
     """
     Helper to get LLM configuration values, returns:
     - api_key, model, provider
     """
-    api_key, provider_key, model = get_configuration_value([
-        {
-            "key": "LLM_API_KEY",
-            "default": os.environ.get("LLM_API_KEY", None),
-        },
-        {
-            "key": "LLM_PROVIDER",
-            "default": os.environ.get("LLM_PROVIDER", "openai"),
-        },
-        {
-            "key": "LLM_MODEL",
-            "default": os.environ.get("LLM_MODEL", None),
-        },
-    ])
+    api_key, provider_key, model = get_configuration_value(
+        [
+            {
+                "key": "LLM_API_KEY",
+                "default": os.environ.get("LLM_API_KEY", None),
+            },
+            {
+                "key": "LLM_PROVIDER",
+                "default": os.environ.get("LLM_PROVIDER", "openai"),
+            },
+            {
+                "key": "LLM_MODEL",
+                "default": os.environ.get("LLM_MODEL", None),
+            },
+        ]
+    )

     provider = SUPPORTED_PROVIDERS.get(provider_key.lower())
     if not provider:
@@ -99,16 +106,20 @@ def get_llm_config() -> Tuple[str | None, str | None, str | None]:

     # Validate model is supported by provider
     if model not in provider.models:
-        log_exception(ValueError(
-            f"Model {model} not supported by {provider.name}. "
-            f"Supported models: {', '.join(provider.models)}"
-        ))
+        log_exception(
+            ValueError(
+                f"Model {model} not supported by {provider.name}. "
+                f"Supported models: {', '.join(provider.models)}"
+            )
+        )
         return None, None, None

     return api_key, model, provider_key


-def get_llm_response(task, prompt, api_key: str, model: str, provider: str) -> Tuple[str | None, str | None]:
+def get_llm_response(
+    task, prompt, api_key: str, model: str, provider: str
+) -> Tuple[str | None, str | None]:
     """Helper to get LLM completion response"""
     final_text = task + "\n" + prompt
     try:
@@ -118,10 +129,7 @@ def get_llm_response(task, prompt, api_key: str, model: str, provider: str) -> Tuple[str | None, str | None]:

         client = OpenAI(api_key=api_key)
         chat_completion = client.chat.completions.create(
-            model=model,
-            messages=[
-                {"role": "user", "content": final_text}
-            ]
+            model=model, messages=[{"role": "user", "content": final_text}]
         )
         text = chat_completion.choices[0].message.content
         return text, None
@@ -135,6 +143,7 @@ def get_llm_response(task, prompt, api_key: str, model: str, provider: str) -> Tuple[str | None, str | None]:
     else:
         return None, f"Error occurred while generating response from {provider}"


 class GPTIntegrationEndpoint(BaseAPIView):
     @allow_permission([ROLE.ADMIN, ROLE.MEMBER])
     def post(self, request, slug, project_id):
@@ -152,7 +161,9 @@ class GPTIntegrationEndpoint(BaseAPIView):
             {"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST
         )

-        text, error = get_llm_response(task, request.data.get("prompt", False), api_key, model, provider)
+        text, error = get_llm_response(
+            task, request.data.get("prompt", False), api_key, model, provider
+        )
         if not text and error:
             return Response(
                 {"error": "An internal error has occurred."},
@@ -190,7 +201,9 @@ class WorkspaceGPTIntegrationEndpoint(BaseAPIView):
             {"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST
         )

-        text, error = get_llm_response(task, request.data.get("prompt", False), api_key, model, provider)
+        text, error = get_llm_response(
+            task, request.data.get("prompt", False), api_key, model, provider
+        )
         if not text and error:
             return Response(
                 {"error": "An internal error has occurred."},
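How the two helpers above compose, as the GPT endpoints use them (a sketch; the task and prompt strings are placeholders):

    api_key, model, provider = get_llm_config()
    if api_key and model:
        text, error = get_llm_response(
            task="Summarize this work item",
            prompt="...",
            api_key=api_key,
            model=model,
            provider=provider,
        )
        if error:
            print(error)
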
@@ -44,6 +44,7 @@ from plane.app.views.base import BaseAPIView
 from plane.utils.timezone_converter import user_timezone_converter
 from plane.utils.global_paginator import paginate
 from plane.utils.host import base_host
+from plane.db.models.intake import SourceType


 class IntakeViewSet(BaseViewSet):
@@ -278,7 +279,7 @@ class IntakeIssueViewSet(BaseViewSet):
                 intake_id=intake_id.id,
                 project_id=project_id,
                 issue_id=serializer.data["id"],
-                source=request.data.get("source", "IN-APP"),
+                source=SourceType.IN_APP,
             )
             # Create an Issue Activity
             issue_activity.delay(
@@ -408,7 +409,6 @@ class IntakeIssueViewSet(BaseViewSet):
         )

         if issue_serializer.is_valid():
-
             # Log all the updates
             requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
             if issue is not None:
@@ -607,7 +607,6 @@ class IntakeIssueViewSet(BaseViewSet):


 class IntakeWorkItemDescriptionVersionEndpoint(BaseAPIView):
-
     def process_paginated_result(self, fields, results, timezone):
         paginated_data = results.values(*fields)

@@ -38,6 +38,7 @@ from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPaginator
 from plane.app.permissions import allow_permission, ROLE
 from plane.utils.error_codes import ERROR_CODES
+from plane.utils.host import base_host

 # Module imports
 from .. import BaseViewSet, BaseAPIView


@@ -23,6 +23,7 @@ from plane.settings.storage import S3Storage
 from plane.bgtasks.storage_metadata_task import get_asset_object_metadata
+from plane.utils.host import base_host


 class IssueAttachmentEndpoint(BaseAPIView):
     serializer_class = IssueAttachmentSerializer
     model = FileAsset

@@ -19,6 +19,7 @@ from plane.db.models import IssueComment, ProjectMember, CommentReaction, Projec
 from plane.bgtasks.issue_activities_task import issue_activity
+from plane.utils.host import base_host


 class IssueCommentViewSet(BaseViewSet):
     serializer_class = IssueCommentSerializer
     model = IssueComment

@@ -15,8 +15,10 @@ from plane.app.serializers import IssueLinkSerializer
 from plane.app.permissions import ProjectEntityPermission
 from plane.db.models import IssueLink
 from plane.bgtasks.issue_activities_task import issue_activity
+from plane.bgtasks.work_item_link_task import crawl_work_item_link_title
+from plane.utils.host import base_host


 class IssueLinkViewSet(BaseViewSet):
     permission_classes = [ProjectEntityPermission]

@@ -43,6 +45,9 @@ class IssueLinkViewSet(BaseViewSet):
         serializer = IssueLinkSerializer(data=request.data)
         if serializer.is_valid():
             serializer.save(project_id=project_id, issue_id=issue_id)
+            crawl_work_item_link_title(
+                serializer.data.get("id"), serializer.data.get("url")
+            )
             issue_activity.delay(
                 type="link.activity.created",
                 requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
@@ -54,6 +59,10 @@ class IssueLinkViewSet(BaseViewSet):
                 notification=True,
                 origin=base_host(request=request, is_app=True),
             )
+
+            issue_link = self.get_queryset().get(id=serializer.data.get("id"))
+            serializer = IssueLinkSerializer(issue_link)
+
             return Response(serializer.data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

@@ -65,9 +74,14 @@ class IssueLinkViewSet(BaseViewSet):
         current_instance = json.dumps(
             IssueLinkSerializer(issue_link).data, cls=DjangoJSONEncoder
         )

         serializer = IssueLinkSerializer(issue_link, data=request.data, partial=True)
         if serializer.is_valid():
             serializer.save()
+            crawl_work_item_link_title(
+                serializer.data.get("id"), serializer.data.get("url")
+            )

             issue_activity.delay(
                 type="link.activity.updated",
                 requested_data=requested_data,
@@ -79,6 +93,9 @@ class IssueLinkViewSet(BaseViewSet):
                 notification=True,
                 origin=base_host(request=request, is_app=True),
             )
+            issue_link = self.get_queryset().get(id=serializer.data.get("id"))
+            serializer = IssueLinkSerializer(issue_link)

             return Response(serializer.data, status=status.HTTP_200_OK)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

@@ -17,6 +17,7 @@ from plane.db.models import IssueReaction
 from plane.bgtasks.issue_activities_task import issue_activity
+from plane.utils.host import base_host


 class IssueReactionViewSet(BaseViewSet):
     serializer_class = IssueReactionSerializer
     model = IssueReaction

@@ -29,6 +29,7 @@ from plane.bgtasks.issue_activities_task import issue_activity
 from plane.utils.issue_relation_mapper import get_actual_relation
+from plane.utils.host import base_host


 class IssueRelationViewSet(BaseViewSet):
     serializer_class = IssueRelationSerializer
     model = IssueRelation

@@ -23,6 +23,8 @@ from plane.bgtasks.issue_activities_task import issue_activity
 from plane.utils.timezone_converter import user_timezone_converter
 from collections import defaultdict
+from plane.utils.host import base_host
+from plane.utils.order_queryset import order_issue_queryset


 class SubIssuesEndpoint(BaseAPIView):
     permission_classes = [ProjectEntityPermission]
@@ -102,6 +104,15 @@ class SubIssuesEndpoint(BaseAPIView):
             .order_by("-created_at")
         )

+        # Ordering
+        order_by_param = request.GET.get("order_by", "-created_at")
+        group_by = request.GET.get("group_by", False)
+
+        if order_by_param:
+            sub_issues, order_by_param = order_issue_queryset(
+                sub_issues, order_by_param
+            )
+
         # creates a dict mapping each state group name to its issue ids
         result = defaultdict(list)
         for sub_issue in sub_issues:
@@ -138,6 +149,26 @@ class SubIssuesEndpoint(BaseAPIView):
         sub_issues = user_timezone_converter(
             sub_issues, datetime_fields, request.user.user_timezone
         )
+        # Grouping
+        if group_by:
+            result_dict = defaultdict(list)
+
+            for issue in sub_issues:
+                if group_by == "assignees__ids":
+                    if issue["assignee_ids"]:
+                        assignee_ids = issue["assignee_ids"]
+                        for assignee_id in assignee_ids:
+                            result_dict[str(assignee_id)].append(issue)
+                    elif issue["assignee_ids"] == []:
+                        result_dict["None"].append(issue)
+
+                elif group_by:
+                    result_dict[str(issue[group_by])].append(issue)
+
+            return Response(
+                {"sub_issues": result_dict, "state_distribution": result},
+                status=status.HTTP_200_OK,
+            )
         return Response(
             {"sub_issues": sub_issues, "state_distribution": result},
             status=status.HTTP_200_OK,

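The grouping branch above buckets already-serialized sub-issues in Python rather than SQL; a trimmed, runnable illustration of the same defaultdict pattern (the sample rows are invented):

    from collections import defaultdict

    sub_issues = [
        {"id": 1, "assignee_ids": ["u1", "u2"]},
        {"id": 2, "assignee_ids": []},
    ]
    result_dict = defaultdict(list)
    for issue in sub_issues:
        if issue["assignee_ids"]:
            for assignee_id in issue["assignee_ids"]:
                result_dict[str(assignee_id)].append(issue)
        else:
            result_dict["None"].append(issue)
    # -> {"u1": [issue 1], "u2": [issue 1], "None": [issue 2]}
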
@@ -63,6 +63,7 @@ from .. import BaseAPIView, BaseViewSet
 from plane.bgtasks.recent_visited_task import recent_visited_task
+from plane.utils.host import base_host


 class ModuleViewSet(BaseViewSet):
     model = Module
     webhook_event = "module"
@@ -710,23 +711,31 @@

     @allow_permission([ROLE.ADMIN, ROLE.MEMBER])
     def partial_update(self, request, slug, project_id, pk):
-        module = self.get_queryset().filter(pk=pk)
+        module_queryset = self.get_queryset().filter(pk=pk)

-        if module.first().archived_at:
+        current_module = module_queryset.first()
+
+        if not current_module:
+            return Response(
+                {"error": "Module not found"},
+                status=status.HTTP_404_NOT_FOUND,
+            )
+
+        if current_module.archived_at:
             return Response(
                 {"error": "Archived module cannot be updated"},
                 status=status.HTTP_400_BAD_REQUEST,
             )
         current_instance = json.dumps(
-            ModuleSerializer(module.first()).data, cls=DjangoJSONEncoder
+            ModuleSerializer(current_module).data, cls=DjangoJSONEncoder
         )
         serializer = ModuleWriteSerializer(
-            module.first(), data=request.data, partial=True
+            current_module, data=request.data, partial=True
         )

         if serializer.is_valid():
             serializer.save()
-            module = module.values(
+            module = module_queryset.values(
                 # Required fields
                 "id",
                 "workspace_id",

@@ -36,6 +36,7 @@ from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPaginator
 from .. import BaseViewSet
+from plane.utils.host import base_host


 class ModuleIssueViewSet(BaseViewSet):
     serializer_class = ModuleIssueSerializer
     model = ModuleIssue
@@ -280,7 +281,11 @@ class ModuleIssueViewSet(BaseViewSet):
             issue_id=str(issue_id),
             project_id=str(project_id),
             current_instance=json.dumps(
-                {"module_name": module_issue.first().module.name if (module_issue.first() and module_issue.first().module) else None}
+                {
+                    "module_name": module_issue.first().module.name
+                    if (module_issue.first() and module_issue.first().module)
+                    else None
+                }
             ),
             epoch=int(timezone.now().timestamp()),
             notification=True,

@@ -42,6 +42,7 @@ from plane.bgtasks.page_version_task import page_version
 from plane.bgtasks.recent_visited_task import recent_visited_task
 from plane.bgtasks.copy_s3_object import copy_s3_objects
+

 def unarchive_archive_page_and_descendants(page_id, archived_at):
     # Your SQL query
     sql = """
@@ -198,7 +199,7 @@ class PageViewSet(BaseViewSet):
         project = Project.objects.get(pk=project_id)

         """
         if the role is guest and guest_view_all_features is false and owned by is not
         the requesting user then don't show the page
         """

@@ -572,6 +573,12 @@ class PageDuplicateEndpoint(BaseAPIView):
             pk=page_id, workspace__slug=slug, projects__id=project_id
         ).first()

+        # check for permission
+        if page.access == Page.PRIVATE_ACCESS and page.owned_by_id != request.user.id:
+            return Response(
+                {"error": "Permission denied"}, status=status.HTTP_403_FORBIDDEN
+            )
+
         # get all the project ids where page is present
         project_ids = ProjectPage.objects.filter(page_id=page_id).values_list(
             "project_id", flat=True
@@ -41,6 +41,7 @@ from plane.bgtasks.recent_visited_task import recent_visited_task
 from plane.utils.exception_logger import log_exception
+from plane.utils.host import base_host


 class ProjectViewSet(BaseViewSet):
     serializer_class = ProjectListSerializer
     model = Project
@@ -274,14 +275,14 @@ class ProjectViewSet(BaseViewSet):
         states = [
             {
                 "name": "Backlog",
-                "color": "#A3A3A3",
+                "color": "#60646C",
                 "sequence": 15000,
                 "group": "backlog",
                 "default": True,
             },
             {
                 "name": "Todo",
-                "color": "#3A3A3A",
+                "color": "#60646C",
                 "sequence": 25000,
                 "group": "unstarted",
             },
@@ -293,13 +294,13 @@ class ProjectViewSet(BaseViewSet):
             },
             {
                 "name": "Done",
-                "color": "#16A34A",
+                "color": "#46A758",
                 "sequence": 45000,
                 "group": "completed",
             },
             {
                 "name": "Cancelled",
-                "color": "#EF4444",
+                "color": "#9AA4BC",
                 "sequence": 55000,
                 "group": "cancelled",
             },
@@ -341,7 +342,7 @@ class ProjectViewSet(BaseViewSet):
             if "already exists" in str(e):
                 return Response(
                     {"name": "The project name is already taken"},
-                    status=status.HTTP_410_GONE,
+                    status=status.HTTP_409_CONFLICT,
                 )
         except Workspace.DoesNotExist:
             return Response(
@@ -350,7 +351,7 @@ class ProjectViewSet(BaseViewSet):
         except serializers.ValidationError:
             return Response(
                 {"identifier": "The project identifier is already taken"},
-                status=status.HTTP_410_GONE,
+                status=status.HTTP_409_CONFLICT,
             )

     def partial_update(self, request, slug, pk=None):
@@ -419,7 +420,7 @@ class ProjectViewSet(BaseViewSet):
             if "already exists" in str(e):
                 return Response(
                     {"name": "The project name is already taken"},
-                    status=status.HTTP_410_GONE,
+                    status=status.HTTP_409_CONFLICT,
                 )
         except (Project.DoesNotExist, Workspace.DoesNotExist):
             return Response(
@@ -428,7 +429,7 @@ class ProjectViewSet(BaseViewSet):
         except serializers.ValidationError:
             return Response(
                 {"identifier": "The project identifier is already taken"},
-                status=status.HTTP_410_GONE,
+                status=status.HTTP_409_CONFLICT,
             )

     def destroy(self, request, slug, pk):
@@ -444,7 +445,7 @@ class ProjectViewSet(BaseViewSet):
                 is_active=True,
             ).exists()
         ):
-            project = Project.objects.get(pk=pk)
+            project = Project.objects.get(pk=pk, workspace__slug=slug)
             project.delete()
             webhook_activity.delay(
                 event="project",

@@ -29,6 +29,7 @@ from plane.db.models import (
 from plane.db.models.project import ProjectNetwork
+from plane.utils.host import base_host


 class ProjectInvitationsViewset(BaseViewSet):
     serializer_class = ProjectMemberInviteSerializer
     model = ProjectMemberInvite

@@ -1,5 +1,5 @@
 # Django imports
-from django.db.models import Q
+from django.db.models import Q, QuerySet

 # Third party imports
 from rest_framework import status
@@ -12,6 +12,95 @@ from plane.utils.issue_search import search_issues


 class IssueSearchEndpoint(BaseAPIView):
+    def filter_issues_by_project(self, project_id: int, issues: QuerySet) -> QuerySet:
+        """
+        Filter issues by project
+        """
+        issues = issues.filter(project_id=project_id)
+
+        return issues
+
+    def search_issues_by_query(self, query: str, issues: QuerySet) -> QuerySet:
+        """
+        Search issues by query
+        """
+        issues = search_issues(query, issues)
+
+        return issues
+
+    def search_issues_and_excluding_parent(
+        self, issues: QuerySet, issue_id: str
+    ) -> QuerySet:
+        """
+        Search issues and epics by query excluding the parent
+        """
+        issue = Issue.issue_objects.filter(pk=issue_id).first()
+        if issue:
+            issues = issues.filter(
+                ~Q(pk=issue_id), ~Q(pk=issue.parent_id), ~Q(parent_id=issue_id)
+            )
+        return issues
+
+    def filter_issues_excluding_related_issues(
+        self, issue_id: str, issues: QuerySet
+    ) -> QuerySet:
+        """
+        Filter issues excluding related issues
+        """
+        issue = Issue.issue_objects.filter(pk=issue_id).first()
+        related_issue_ids = (
+            IssueRelation.objects.filter(Q(related_issue=issue) | Q(issue=issue))
+            .values_list("issue_id", "related_issue_id")
+            .distinct()
+        )
+
+        related_issue_ids = [item for sublist in related_issue_ids for item in sublist]
+
+        if issue:
+            issues = issues.filter(~Q(pk=issue_id), ~Q(pk__in=related_issue_ids))
+
+        return issues
+
+    def filter_root_issues_only(self, issue_id: str, issues: QuerySet) -> QuerySet:
+        """
+        Filter root issues only
+        """
+        issue = Issue.issue_objects.filter(pk=issue_id).first()
+        if issue:
+            issues = issues.filter(~Q(pk=issue_id), parent__isnull=True)
+            if issue.parent:
+                issues = issues.filter(~Q(pk=issue.parent_id))
+        return issues
+
+    def exclude_issues_in_cycles(self, issues: QuerySet) -> QuerySet:
+        """
+        Exclude issues in cycles
+        """
+        issues = issues.exclude(
+            Q(issue_cycle__isnull=False) & Q(issue_cycle__deleted_at__isnull=True)
+        )
+        return issues
+
+    def exclude_issues_in_module(self, issues: QuerySet, module: str) -> QuerySet:
+        """
+        Exclude issues in a module
+        """
+        issues = issues.exclude(
+            Q(issue_module__module=module) & Q(issue_module__deleted_at__isnull=True)
+        )
+        return issues
+
+    def filter_issues_without_target_date(self, issues: QuerySet) -> QuerySet:
+        """
+        Filter issues without a target date
+        """
+        issues = issues.filter(target_date__isnull=True)
+        return issues
+
     def get(self, request, slug, project_id):
         query = request.query_params.get("search", False)
         workspace_search = request.query_params.get("workspace_search", "false")
@@ -21,7 +110,6 @@ class IssueSearchEndpoint(BaseAPIView):
|
||||
module = request.query_params.get("module", False)
|
||||
sub_issue = request.query_params.get("sub_issue", "false")
|
||||
target_date = request.query_params.get("target_date", True)
|
||||
|
||||
issue_id = request.query_params.get("issue_id", False)
|
||||
|
||||
issues = Issue.issue_objects.filter(
|
||||
@@ -32,52 +120,28 @@ class IssueSearchEndpoint(BaseAPIView):
|
||||
)
|
||||
|
||||
if workspace_search == "false":
|
||||
issues = issues.filter(project_id=project_id)
|
||||
issues = self.filter_issues_by_project(project_id, issues)
|
||||
|
||||
if query:
|
||||
issues = search_issues(query, issues)
|
||||
issues = self.search_issues_by_query(query, issues)
|
||||
|
||||
if parent == "true" and issue_id:
|
||||
issue = Issue.issue_objects.filter(pk=issue_id).first()
|
||||
if issue:
|
||||
issues = issues.filter(
|
||||
~Q(pk=issue_id), ~Q(pk=issue.parent_id), ~Q(parent_id=issue_id)
|
||||
)
|
||||
issues = self.search_issues_and_excluding_parent(issues, issue_id)
|
||||
|
||||
if issue_relation == "true" and issue_id:
|
||||
issue = Issue.issue_objects.filter(pk=issue_id).first()
|
||||
related_issue_ids = IssueRelation.objects.filter(
|
||||
Q(related_issue=issue) | Q(issue=issue)
|
||||
).values_list(
|
||||
"issue_id", "related_issue_id"
|
||||
).distinct()
|
||||
issues = self.filter_issues_excluding_related_issues(issue_id, issues)
|
||||
|
||||
related_issue_ids = [item for sublist in related_issue_ids for item in sublist]
|
||||
|
||||
if issue:
|
||||
issues = issues.filter(
|
||||
~Q(pk=issue_id),
|
||||
~Q(pk__in=related_issue_ids),
|
||||
)
|
||||
if sub_issue == "true" and issue_id:
|
||||
issue = Issue.issue_objects.filter(pk=issue_id).first()
|
||||
if issue:
|
||||
issues = issues.filter(~Q(pk=issue_id), parent__isnull=True)
|
||||
if issue.parent:
|
||||
issues = issues.filter(~Q(pk=issue.parent_id))
|
||||
issues = self.filter_root_issues_only(issue_id, issues)
|
||||
|
||||
if cycle == "true":
|
||||
issues = issues.exclude(
|
||||
Q(issue_cycle__isnull=False) & Q(issue_cycle__deleted_at__isnull=True)
|
||||
)
|
||||
issues = self.exclude_issues_in_cycles(issues)
|
||||
|
||||
if module:
|
||||
issues = issues.exclude(
|
||||
Q(issue_module__module=module)
|
||||
& Q(issue_module__deleted_at__isnull=True)
|
||||
)
|
||||
issues = self.exclude_issues_in_module(issues, module)
|
||||
|
||||
if target_date == "none":
|
||||
issues = issues.filter(target_date__isnull=True)
|
||||
issues = self.filter_issues_without_target_date(issues)
|
||||
|
||||
if ProjectMember.objects.filter(
|
||||
project_id=project_id, member=self.request.user, is_active=True, role=5
|
||||
|
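Note: the refactor above lifts each query-param branch of IssueSearchEndpoint.get into a named helper that takes and returns a QuerySet, so the view body becomes a linear pipeline of conditional steps. A rough sketch of that shape (a hypothetical standalone function, not the view's actual methods):

# Sketch of the filter-pipeline shape used by the refactor: each step is a
# small function from QuerySet to QuerySet, applied only when its query
# parameter is set. Illustrative; the real helpers live on the endpoint class.
from typing import Callable

def apply_filters(issues, steps: list[tuple[bool, Callable]]):
    # Each (condition, step) pair narrows the queryset when the condition holds.
    for condition, step in steps:
        if condition:
            issues = step(issues)
    return issues
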
@@ -1,5 +1,6 @@
# Python imports
from itertools import groupby
from collections import defaultdict

# Django imports
from django.db.utils import IntegrityError
@@ -74,7 +75,19 @@ class StateViewSet(BaseViewSet):
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
def list(self, request, slug, project_id):
states = StateSerializer(self.get_queryset(), many=True).data

grouped_states = defaultdict(list)
for state in states:
grouped_states[state["group"]].append(state)

for group, group_states in grouped_states.items():
count = len(group_states)

for index, state in enumerate(group_states, start=1):
state["order"] = index / count

grouped = request.GET.get("grouped", False)

if grouped == "true":
state_dict = {}
for key, value in groupby(
@@ -83,6 +96,7 @@ class StateViewSet(BaseViewSet):
):
state_dict[str(key)] = list(value)
return Response(state_dict, status=status.HTTP_200_OK)

return Response(states, status=status.HTTP_200_OK)

@invalidate_cache(path="workspaces/:slug/states/", url_params=True, user=False)

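Note: the list() change computes a per-group "order" as index / count, so the states inside each group get evenly spaced fractions in (0, 1]. The grouping arithmetic in isolation, with illustrative sample data:

# Evenly spaced in-group ordering: for n states in a group, the orders are
# 1/n, 2/n, ..., n/n. Sample data is illustrative.
from collections import defaultdict

states = [
    {"name": "Todo", "group": "unstarted"},
    {"name": "In Progress", "group": "started"},
    {"name": "In Review", "group": "started"},
]

grouped = defaultdict(list)
for state in states:
    grouped[state["group"]].append(state)

for group, group_states in grouped.items():
    count = len(group_states)
    for index, state in enumerate(group_states, start=1):
        state["order"] = index / count

# "started" states get order 0.5 and 1.0; the lone "unstarted" state gets 1.0.
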
@@ -24,125 +24,152 @@ class TimezoneEndpoint(APIView):
@method_decorator(cache_page(60 * 60 * 2))
def get(self, request):
timezone_locations = [
('Midway Island', 'Pacific/Midway'), # UTC-11:00
('American Samoa', 'Pacific/Pago_Pago'), # UTC-11:00
('Hawaii', 'Pacific/Honolulu'), # UTC-10:00
('Aleutian Islands', 'America/Adak'), # UTC-10:00 (DST: UTC-09:00)
('Marquesas Islands', 'Pacific/Marquesas'), # UTC-09:30
('Alaska', 'America/Anchorage'), # UTC-09:00 (DST: UTC-08:00)
('Gambier Islands', 'Pacific/Gambier'), # UTC-09:00
('Pacific Time (US and Canada)', 'America/Los_Angeles'), # UTC-08:00 (DST: UTC-07:00)
('Baja California', 'America/Tijuana'), # UTC-08:00 (DST: UTC-07:00)
('Mountain Time (US and Canada)', 'America/Denver'), # UTC-07:00 (DST: UTC-06:00)
('Arizona', 'America/Phoenix'), # UTC-07:00
('Chihuahua, Mazatlan', 'America/Chihuahua'), # UTC-07:00 (DST: UTC-06:00)
('Central Time (US and Canada)', 'America/Chicago'), # UTC-06:00 (DST: UTC-05:00)
('Saskatchewan', 'America/Regina'), # UTC-06:00
('Guadalajara, Mexico City, Monterrey', 'America/Mexico_City'), # UTC-06:00 (DST: UTC-05:00)
('Tegucigalpa, Honduras', 'America/Tegucigalpa'), # UTC-06:00
('Costa Rica', 'America/Costa_Rica'), # UTC-06:00
('Eastern Time (US and Canada)', 'America/New_York'), # UTC-05:00 (DST: UTC-04:00)
('Lima', 'America/Lima'), # UTC-05:00
('Bogota', 'America/Bogota'), # UTC-05:00
('Quito', 'America/Guayaquil'), # UTC-05:00
('Chetumal', 'America/Cancun'), # UTC-05:00 (DST: UTC-04:00)
('Caracas (Old Venezuela Time)', 'America/Caracas'), # UTC-04:30
('Atlantic Time (Canada)', 'America/Halifax'), # UTC-04:00 (DST: UTC-03:00)
('Caracas', 'America/Caracas'), # UTC-04:00
('Santiago', 'America/Santiago'), # UTC-04:00 (DST: UTC-03:00)
('La Paz', 'America/La_Paz'), # UTC-04:00
('Manaus', 'America/Manaus'), # UTC-04:00
('Georgetown', 'America/Guyana'), # UTC-04:00
('Bermuda', 'Atlantic/Bermuda'), # UTC-04:00 (DST: UTC-03:00)
('Newfoundland Time (Canada)', 'America/St_Johns'), # UTC-03:30 (DST: UTC-02:30)
('Buenos Aires', 'America/Argentina/Buenos_Aires'), # UTC-03:00
('Brasilia', 'America/Sao_Paulo'), # UTC-03:00
('Greenland', 'America/Godthab'), # UTC-03:00 (DST: UTC-02:00)
('Montevideo', 'America/Montevideo'), # UTC-03:00
('Falkland Islands', 'Atlantic/Stanley'), # UTC-03:00
('South Georgia and the South Sandwich Islands', 'Atlantic/South_Georgia'), # UTC-02:00
('Azores', 'Atlantic/Azores'), # UTC-01:00 (DST: UTC+00:00)
('Cape Verde Islands', 'Atlantic/Cape_Verde'), # UTC-01:00
('Dublin', 'Europe/Dublin'), # UTC+00:00 (DST: UTC+01:00)
('Reykjavik', 'Atlantic/Reykjavik'), # UTC+00:00
('Lisbon', 'Europe/Lisbon'), # UTC+00:00 (DST: UTC+01:00)
('Monrovia', 'Africa/Monrovia'), # UTC+00:00
('Casablanca', 'Africa/Casablanca'), # UTC+00:00 (DST: UTC+01:00)
('Central European Time (Berlin, Rome, Paris)', 'Europe/Paris'), # UTC+01:00 (DST: UTC+02:00)
('West Central Africa', 'Africa/Lagos'), # UTC+01:00
('Algiers', 'Africa/Algiers'), # UTC+01:00
('Lagos', 'Africa/Lagos'), # UTC+01:00
('Tunis', 'Africa/Tunis'), # UTC+01:00
('Eastern European Time (Cairo, Helsinki, Kyiv)', 'Europe/Kiev'), # UTC+02:00 (DST: UTC+03:00)
('Athens', 'Europe/Athens'), # UTC+02:00 (DST: UTC+03:00)
('Jerusalem', 'Asia/Jerusalem'), # UTC+02:00 (DST: UTC+03:00)
('Johannesburg', 'Africa/Johannesburg'), # UTC+02:00
('Harare, Pretoria', 'Africa/Harare'), # UTC+02:00
('Moscow Time', 'Europe/Moscow'), # UTC+03:00
('Baghdad', 'Asia/Baghdad'), # UTC+03:00
('Nairobi', 'Africa/Nairobi'), # UTC+03:00
('Kuwait, Riyadh', 'Asia/Riyadh'), # UTC+03:00
('Tehran', 'Asia/Tehran'), # UTC+03:30 (DST: UTC+04:30)
('Abu Dhabi', 'Asia/Dubai'), # UTC+04:00
('Baku', 'Asia/Baku'), # UTC+04:00 (DST: UTC+05:00)
('Yerevan', 'Asia/Yerevan'), # UTC+04:00 (DST: UTC+05:00)
('Astrakhan', 'Europe/Astrakhan'), # UTC+04:00
('Tbilisi', 'Asia/Tbilisi'), # UTC+04:00
('Mauritius', 'Indian/Mauritius'), # UTC+04:00
('Islamabad', 'Asia/Karachi'), # UTC+05:00
('Karachi', 'Asia/Karachi'), # UTC+05:00
('Tashkent', 'Asia/Tashkent'), # UTC+05:00
('Yekaterinburg', 'Asia/Yekaterinburg'), # UTC+05:00
('Maldives', 'Indian/Maldives'), # UTC+05:00
('Chagos', 'Indian/Chagos'), # UTC+05:00
('Chennai', 'Asia/Kolkata'), # UTC+05:30
('Kolkata', 'Asia/Kolkata'), # UTC+05:30
('Mumbai', 'Asia/Kolkata'), # UTC+05:30
('New Delhi', 'Asia/Kolkata'), # UTC+05:30
('Sri Jayawardenepura', 'Asia/Colombo'), # UTC+05:30
('Kathmandu', 'Asia/Kathmandu'), # UTC+05:45
('Dhaka', 'Asia/Dhaka'), # UTC+06:00
('Almaty', 'Asia/Almaty'), # UTC+06:00
('Bishkek', 'Asia/Bishkek'), # UTC+06:00
('Thimphu', 'Asia/Thimphu'), # UTC+06:00
('Yangon (Rangoon)', 'Asia/Yangon'), # UTC+06:30
('Cocos Islands', 'Indian/Cocos'), # UTC+06:30
('Bangkok', 'Asia/Bangkok'), # UTC+07:00
('Hanoi', 'Asia/Ho_Chi_Minh'), # UTC+07:00
('Jakarta', 'Asia/Jakarta'), # UTC+07:00
('Novosibirsk', 'Asia/Novosibirsk'), # UTC+07:00
('Krasnoyarsk', 'Asia/Krasnoyarsk'), # UTC+07:00
('Beijing', 'Asia/Shanghai'), # UTC+08:00
('Singapore', 'Asia/Singapore'), # UTC+08:00
('Perth', 'Australia/Perth'), # UTC+08:00
('Hong Kong', 'Asia/Hong_Kong'), # UTC+08:00
('Ulaanbaatar', 'Asia/Ulaanbaatar'), # UTC+08:00
('Palau', 'Pacific/Palau'), # UTC+08:00
('Eucla', 'Australia/Eucla'), # UTC+08:45
('Tokyo', 'Asia/Tokyo'), # UTC+09:00
('Seoul', 'Asia/Seoul'), # UTC+09:00
('Yakutsk', 'Asia/Yakutsk'), # UTC+09:00
('Adelaide', 'Australia/Adelaide'), # UTC+09:30 (DST: UTC+10:30)
('Darwin', 'Australia/Darwin'), # UTC+09:30
('Sydney', 'Australia/Sydney'), # UTC+10:00 (DST: UTC+11:00)
('Brisbane', 'Australia/Brisbane'), # UTC+10:00
('Guam', 'Pacific/Guam'), # UTC+10:00
('Vladivostok', 'Asia/Vladivostok'), # UTC+10:00
('Tahiti', 'Pacific/Tahiti'), # UTC+10:00
('Lord Howe Island', 'Australia/Lord_Howe'), # UTC+10:30 (DST: UTC+11:00)
('Solomon Islands', 'Pacific/Guadalcanal'), # UTC+11:00
('Magadan', 'Asia/Magadan'), # UTC+11:00
('Norfolk Island', 'Pacific/Norfolk'), # UTC+11:00
('Bougainville Island', 'Pacific/Bougainville'), # UTC+11:00
('Chokurdakh', 'Asia/Srednekolymsk'), # UTC+11:00
('Auckland', 'Pacific/Auckland'), # UTC+12:00 (DST: UTC+13:00)
('Wellington', 'Pacific/Auckland'), # UTC+12:00 (DST: UTC+13:00)
('Fiji Islands', 'Pacific/Fiji'), # UTC+12:00 (DST: UTC+13:00)
('Anadyr', 'Asia/Anadyr'), # UTC+12:00
('Chatham Islands', 'Pacific/Chatham'), # UTC+12:45 (DST: UTC+13:45)
("Nuku'alofa", 'Pacific/Tongatapu'), # UTC+13:00
('Samoa', 'Pacific/Apia'), # UTC+13:00 (DST: UTC+14:00)
('Kiritimati Island', 'Pacific/Kiritimati') # UTC+14:00
("Midway Island", "Pacific/Midway"), # UTC-11:00
("American Samoa", "Pacific/Pago_Pago"), # UTC-11:00
("Hawaii", "Pacific/Honolulu"), # UTC-10:00
("Aleutian Islands", "America/Adak"), # UTC-10:00 (DST: UTC-09:00)
("Marquesas Islands", "Pacific/Marquesas"), # UTC-09:30
("Alaska", "America/Anchorage"), # UTC-09:00 (DST: UTC-08:00)
("Gambier Islands", "Pacific/Gambier"), # UTC-09:00
(
"Pacific Time (US and Canada)",
"America/Los_Angeles",
), # UTC-08:00 (DST: UTC-07:00)
("Baja California", "America/Tijuana"), # UTC-08:00 (DST: UTC-07:00)
(
"Mountain Time (US and Canada)",
"America/Denver",
), # UTC-07:00 (DST: UTC-06:00)
("Arizona", "America/Phoenix"), # UTC-07:00
("Chihuahua, Mazatlan", "America/Chihuahua"), # UTC-07:00 (DST: UTC-06:00)
(
"Central Time (US and Canada)",
"America/Chicago",
), # UTC-06:00 (DST: UTC-05:00)
("Saskatchewan", "America/Regina"), # UTC-06:00
(
"Guadalajara, Mexico City, Monterrey",
"America/Mexico_City",
), # UTC-06:00 (DST: UTC-05:00)
("Tegucigalpa, Honduras", "America/Tegucigalpa"), # UTC-06:00
("Costa Rica", "America/Costa_Rica"), # UTC-06:00
(
"Eastern Time (US and Canada)",
"America/New_York",
), # UTC-05:00 (DST: UTC-04:00)
("Lima", "America/Lima"), # UTC-05:00
("Bogota", "America/Bogota"), # UTC-05:00
("Quito", "America/Guayaquil"), # UTC-05:00
("Chetumal", "America/Cancun"), # UTC-05:00 (DST: UTC-04:00)
("Caracas (Old Venezuela Time)", "America/Caracas"), # UTC-04:30
("Atlantic Time (Canada)", "America/Halifax"), # UTC-04:00 (DST: UTC-03:00)
("Caracas", "America/Caracas"), # UTC-04:00
("Santiago", "America/Santiago"), # UTC-04:00 (DST: UTC-03:00)
("La Paz", "America/La_Paz"), # UTC-04:00
("Manaus", "America/Manaus"), # UTC-04:00
("Georgetown", "America/Guyana"), # UTC-04:00
("Bermuda", "Atlantic/Bermuda"), # UTC-04:00 (DST: UTC-03:00)
(
"Newfoundland Time (Canada)",
"America/St_Johns",
), # UTC-03:30 (DST: UTC-02:30)
("Buenos Aires", "America/Argentina/Buenos_Aires"), # UTC-03:00
("Brasilia", "America/Sao_Paulo"), # UTC-03:00
("Greenland", "America/Godthab"), # UTC-03:00 (DST: UTC-02:00)
("Montevideo", "America/Montevideo"), # UTC-03:00
("Falkland Islands", "Atlantic/Stanley"), # UTC-03:00
(
"South Georgia and the South Sandwich Islands",
"Atlantic/South_Georgia",
), # UTC-02:00
("Azores", "Atlantic/Azores"), # UTC-01:00 (DST: UTC+00:00)
("Cape Verde Islands", "Atlantic/Cape_Verde"), # UTC-01:00
("Dublin", "Europe/Dublin"), # UTC+00:00 (DST: UTC+01:00)
("Reykjavik", "Atlantic/Reykjavik"), # UTC+00:00
("Lisbon", "Europe/Lisbon"), # UTC+00:00 (DST: UTC+01:00)
("Monrovia", "Africa/Monrovia"), # UTC+00:00
("Casablanca", "Africa/Casablanca"), # UTC+00:00 (DST: UTC+01:00)
(
"Central European Time (Berlin, Rome, Paris)",
"Europe/Paris",
), # UTC+01:00 (DST: UTC+02:00)
("West Central Africa", "Africa/Lagos"), # UTC+01:00
("Algiers", "Africa/Algiers"), # UTC+01:00
("Lagos", "Africa/Lagos"), # UTC+01:00
("Tunis", "Africa/Tunis"), # UTC+01:00
(
"Eastern European Time (Cairo, Helsinki, Kyiv)",
"Europe/Kiev",
), # UTC+02:00 (DST: UTC+03:00)
("Athens", "Europe/Athens"), # UTC+02:00 (DST: UTC+03:00)
("Jerusalem", "Asia/Jerusalem"), # UTC+02:00 (DST: UTC+03:00)
("Johannesburg", "Africa/Johannesburg"), # UTC+02:00
("Harare, Pretoria", "Africa/Harare"), # UTC+02:00
("Moscow Time", "Europe/Moscow"), # UTC+03:00
("Baghdad", "Asia/Baghdad"), # UTC+03:00
("Nairobi", "Africa/Nairobi"), # UTC+03:00
("Kuwait, Riyadh", "Asia/Riyadh"), # UTC+03:00
("Tehran", "Asia/Tehran"), # UTC+03:30 (DST: UTC+04:30)
("Abu Dhabi", "Asia/Dubai"), # UTC+04:00
("Baku", "Asia/Baku"), # UTC+04:00 (DST: UTC+05:00)
("Yerevan", "Asia/Yerevan"), # UTC+04:00 (DST: UTC+05:00)
("Astrakhan", "Europe/Astrakhan"), # UTC+04:00
("Tbilisi", "Asia/Tbilisi"), # UTC+04:00
("Mauritius", "Indian/Mauritius"), # UTC+04:00
("Islamabad", "Asia/Karachi"), # UTC+05:00
("Karachi", "Asia/Karachi"), # UTC+05:00
("Tashkent", "Asia/Tashkent"), # UTC+05:00
("Yekaterinburg", "Asia/Yekaterinburg"), # UTC+05:00
("Maldives", "Indian/Maldives"), # UTC+05:00
("Chagos", "Indian/Chagos"), # UTC+05:00
("Chennai", "Asia/Kolkata"), # UTC+05:30
("Kolkata", "Asia/Kolkata"), # UTC+05:30
("Mumbai", "Asia/Kolkata"), # UTC+05:30
("New Delhi", "Asia/Kolkata"), # UTC+05:30
("Sri Jayawardenepura", "Asia/Colombo"), # UTC+05:30
("Kathmandu", "Asia/Kathmandu"), # UTC+05:45
("Dhaka", "Asia/Dhaka"), # UTC+06:00
("Almaty", "Asia/Almaty"), # UTC+06:00
("Bishkek", "Asia/Bishkek"), # UTC+06:00
("Thimphu", "Asia/Thimphu"), # UTC+06:00
("Yangon (Rangoon)", "Asia/Yangon"), # UTC+06:30
("Cocos Islands", "Indian/Cocos"), # UTC+06:30
("Bangkok", "Asia/Bangkok"), # UTC+07:00
("Hanoi", "Asia/Ho_Chi_Minh"), # UTC+07:00
("Jakarta", "Asia/Jakarta"), # UTC+07:00
("Novosibirsk", "Asia/Novosibirsk"), # UTC+07:00
("Krasnoyarsk", "Asia/Krasnoyarsk"), # UTC+07:00
("Beijing", "Asia/Shanghai"), # UTC+08:00
("Singapore", "Asia/Singapore"), # UTC+08:00
("Perth", "Australia/Perth"), # UTC+08:00
("Hong Kong", "Asia/Hong_Kong"), # UTC+08:00
("Ulaanbaatar", "Asia/Ulaanbaatar"), # UTC+08:00
("Palau", "Pacific/Palau"), # UTC+08:00
("Eucla", "Australia/Eucla"), # UTC+08:45
("Tokyo", "Asia/Tokyo"), # UTC+09:00
("Seoul", "Asia/Seoul"), # UTC+09:00
("Yakutsk", "Asia/Yakutsk"), # UTC+09:00
("Adelaide", "Australia/Adelaide"), # UTC+09:30 (DST: UTC+10:30)
("Darwin", "Australia/Darwin"), # UTC+09:30
("Sydney", "Australia/Sydney"), # UTC+10:00 (DST: UTC+11:00)
("Brisbane", "Australia/Brisbane"), # UTC+10:00
("Guam", "Pacific/Guam"), # UTC+10:00
("Vladivostok", "Asia/Vladivostok"), # UTC+10:00
("Tahiti", "Pacific/Tahiti"), # UTC+10:00
("Lord Howe Island", "Australia/Lord_Howe"), # UTC+10:30 (DST: UTC+11:00)
("Solomon Islands", "Pacific/Guadalcanal"), # UTC+11:00
("Magadan", "Asia/Magadan"), # UTC+11:00
("Norfolk Island", "Pacific/Norfolk"), # UTC+11:00
("Bougainville Island", "Pacific/Bougainville"), # UTC+11:00
("Chokurdakh", "Asia/Srednekolymsk"), # UTC+11:00
("Auckland", "Pacific/Auckland"), # UTC+12:00 (DST: UTC+13:00)
("Wellington", "Pacific/Auckland"), # UTC+12:00 (DST: UTC+13:00)
("Fiji Islands", "Pacific/Fiji"), # UTC+12:00 (DST: UTC+13:00)
("Anadyr", "Asia/Anadyr"), # UTC+12:00
("Chatham Islands", "Pacific/Chatham"), # UTC+12:45 (DST: UTC+13:45)
("Nuku'alofa", "Pacific/Tongatapu"), # UTC+13:00
("Samoa", "Pacific/Apia"), # UTC+13:00 (DST: UTC+14:00)
("Kiritimati Island", "Pacific/Kiritimati"), # UTC+14:00
]

timezone_list = []
@@ -150,7 +177,6 @@ class TimezoneEndpoint(APIView):

# Process timezone mapping
for friendly_name, tz_identifier in timezone_locations:

try:
tz = pytz.timezone(tz_identifier)
current_offset = now.astimezone(tz).strftime("%z")

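Note: the reflowed list above only changes quoting and line wrapping; the (label, IANA identifier) pairs are unchanged. The endpoint derives each entry's current UTC offset at request time, roughly like this (a sketch, assuming pytz and a timezone-aware "now"):

# Sketch: resolve an IANA identifier with pytz and format its current offset.
# The "%z" directive yields e.g. "+0530"; unknown identifiers are skipped.
from datetime import datetime, timezone
import pytz

now = datetime.now(timezone.utc)
for friendly_name, tz_identifier in [("Kolkata", "Asia/Kolkata")]:
    try:
        tz = pytz.timezone(tz_identifier)
        current_offset = now.astimezone(tz).strftime("%z")
        print(friendly_name, current_offset)  # Kolkata +0530
    except pytz.UnknownTimeZoneError:
        continue
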
@@ -29,7 +29,7 @@ class WebhookEndpoint(BaseAPIView):
if "already exists" in str(e):
return Response(
{"error": "URL already exists for the workspace"},
status=status.HTTP_410_GONE,
status=status.HTTP_409_CONFLICT,
)
raise IntegrityError


@@ -3,6 +3,7 @@ import csv
import io
import os
from datetime import date
import uuid

from dateutil.relativedelta import relativedelta
from django.db import IntegrityError
@@ -35,6 +36,7 @@ from plane.db.models import (
Workspace,
WorkspaceMember,
WorkspaceTheme,
Profile
)
from plane.app.permissions import ROLE, allow_permission
from django.utils.decorators import method_decorator
@@ -42,6 +44,8 @@ from django.views.decorators.cache import cache_control
from django.views.decorators.vary import vary_on_cookie
from plane.utils.constants import RESTRICTED_WORKSPACE_SLUGS
from plane.license.utils.instance_value import get_configuration_value
from plane.bgtasks.workspace_seed_task import workspace_seed
from plane.utils.url import contains_url


class WorkSpaceViewSet(BaseViewSet):
@@ -108,6 +112,12 @@ class WorkSpaceViewSet(BaseViewSet):
status=status.HTTP_400_BAD_REQUEST,
)

if contains_url(name):
return Response(
{"error": "Name cannot contain a URL"},
status=status.HTTP_400_BAD_REQUEST,
)

if serializer.is_valid(raise_exception=True):
serializer.save(owner=request.user)
# Create Workspace member
@@ -119,11 +129,15 @@ class WorkSpaceViewSet(BaseViewSet):
)

# Get total members and role
total_members=WorkspaceMember.objects.filter(workspace_id=serializer.data["id"]).count()
total_members = WorkspaceMember.objects.filter(
workspace_id=serializer.data["id"]
).count()
data = serializer.data
data["total_members"] = total_members
data["role"] = 20

workspace_seed.delay(serializer.data["id"])

return Response(data, status=status.HTTP_201_CREATED)
return Response(
[serializer.errors[error][0] for error in serializer.errors],
@@ -134,7 +148,7 @@ class WorkSpaceViewSet(BaseViewSet):
if "already exists" in str(e):
return Response(
{"slug": "The workspace with the slug already exists"},
status=status.HTTP_410_GONE,
status=status.HTTP_409_CONFLICT,
)

@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST], level="WORKSPACE")
@@ -145,8 +159,19 @@ class WorkSpaceViewSet(BaseViewSet):
def partial_update(self, request, *args, **kwargs):
return super().partial_update(request, *args, **kwargs)


def remove_last_workspace_ids_from_user_settings(self, id: uuid.UUID) -> None:
"""
Remove the last workspace id from the user settings
"""
Profile.objects.filter(last_workspace_id=id).update(last_workspace_id=None)
return

@allow_permission([ROLE.ADMIN], level="WORKSPACE")
def destroy(self, request, *args, **kwargs):
# Get the workspace
workspace = self.get_object()
self.remove_last_workspace_ids_from_user_settings(workspace.id)
return super().destroy(request, *args, **kwargs)


@@ -167,10 +192,9 @@ class UserWorkSpacesEndpoint(BaseAPIView):
.values("count")
)

role = (
WorkspaceMember.objects.filter(workspace=OuterRef("id"), member=request.user, is_active=True)
.values("role")
)
role = WorkspaceMember.objects.filter(
workspace=OuterRef("id"), member=request.user, is_active=True
).values("role")

workspace = (
Workspace.objects.prefetch_related(

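Note: the role lookup above is a correlated subquery on the outer Workspace id, only reformatted onto fewer lines. For reference, the usual way such a values("role") subquery is consumed (a sketch with model names mirroring the diff; it assumes a configured Django project with these models):

# Sketch: annotate each workspace with the requesting member's role via a
# correlated subquery. Assumes Workspace/WorkspaceMember models as in plane.
from django.db.models import OuterRef, Subquery

def workspaces_with_role(user):
    role = WorkspaceMember.objects.filter(
        workspace=OuterRef("id"), member=user, is_active=True
    ).values("role")
    # Slicing to [:1] keeps the annotation scalar per outer row.
    return Workspace.objects.annotate(my_role=Subquery(role[:1]))
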
@@ -12,6 +12,7 @@ from plane.app.permissions import WorkspaceViewerPermission
from plane.app.serializers.cycle import CycleSerializer
from plane.utils.timezone_converter import user_timezone_converter


class WorkspaceCyclesEndpoint(BaseAPIView):
permission_classes = [WorkspaceViewerPermission]

@@ -29,6 +30,7 @@ class WorkspaceCyclesEndpoint(BaseAPIView):
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
issue_cycle__deleted_at__isnull=True,
issue_cycle__issue__deleted_at__isnull=True,
),
)
)

@@ -38,6 +38,7 @@ from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.issue_filters import issue_filters
from plane.utils.host import base_host


class WorkspaceDraftIssueViewSet(BaseViewSet):
model = DraftIssue


@@ -8,6 +8,7 @@ from plane.app.views.base import BaseAPIView
from plane.db.models import State
from plane.app.permissions import WorkspaceEntityPermission
from plane.utils.cache import cache_response
from collections import defaultdict


class WorkspaceStatesEndpoint(BaseAPIView):
@@ -22,5 +23,16 @@ class WorkspaceStatesEndpoint(BaseAPIView):
project__archived_at__isnull=True,
is_triage=False,
)

grouped_states = defaultdict(list)
for state in states:
grouped_states[state.group].append(state)

for group, group_states in grouped_states.items():
count = len(group_states)

for index, state in enumerate(group_states, start=1):
state.order = index / count

serializer = StateSerializer(states, many=True).data
return Response(serializer, status=status.HTTP_200_OK)

@@ -27,10 +27,7 @@ class WorkspaceUserPreferenceViewSet(BaseAPIView):

create_preference_keys = []

keys = [
key
for key, _ in WorkspaceUserPreference.UserPreferenceKeys.choices
]
keys = [key for key, _ in WorkspaceUserPreference.UserPreferenceKeys.choices]

for preference in keys:
if preference not in get_preference.values_list("key", flat=True):
@@ -39,7 +36,10 @@ class WorkspaceUserPreferenceViewSet(BaseAPIView):
preference = WorkspaceUserPreference.objects.bulk_create(
[
WorkspaceUserPreference(
key=key, user=request.user, workspace=workspace, sort_order=(65535 + (i*10000))
key=key,
user=request.user,
workspace=workspace,
sort_order=(65535 + (i * 10000)),
)
for i, key in enumerate(create_preference_keys)
],
@@ -47,10 +47,13 @@ class WorkspaceUserPreferenceViewSet(BaseAPIView):
ignore_conflicts=True,
)

preferences = WorkspaceUserPreference.objects.filter(
user=request.user, workspace_id=workspace.id
).order_by("sort_order").values("key", "is_pinned", "sort_order")

preferences = (
WorkspaceUserPreference.objects.filter(
user=request.user, workspace_id=workspace.id
)
.order_by("sort_order")
.values("key", "is_pinned", "sort_order")
)

user_preferences = {}

@@ -58,7 +61,7 @@ class WorkspaceUserPreferenceViewSet(BaseAPIView):
user_preferences[(str(preference["key"]))] = {
"is_pinned": preference["is_pinned"],
"sort_order": preference["sort_order"],
}
}
return Response(
user_preferences,
status=status.HTTP_200_OK,

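Note: new preference rows are seeded with sort_order = 65535 + i * 10000, leaving wide gaps so a row can later be reordered between two neighbors without renumbering everything. The spacing arithmetic in isolation (key names are illustrative):

# Gap-based ordering: seeded keys land at 65535, 75535, 85535, ...
# A row dragged between two neighbors can simply take their midpoint.
keys = ["home", "your_work", "drafts"]
orders = {key: 65535 + (i * 10000) for i, key in enumerate(keys)}
assert orders == {"home": 65535, "your_work": 75535, "drafts": 85535}

# Move "drafts" between "home" and "your_work" without touching other rows:
orders["drafts"] = (orders["home"] + orders["your_work"]) // 2  # 70535
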
@@ -18,6 +18,7 @@ from plane.bgtasks.user_activation_email_task import user_activation_email
from plane.utils.host import base_host
from plane.utils.ip_address import get_client_ip


class Adapter:
"""Common interface for all auth providers"""


@@ -41,7 +41,6 @@ AUTHENTICATION_ERROR_CODES = {
"GOOGLE_OAUTH_PROVIDER_ERROR": 5115,
"GITHUB_OAUTH_PROVIDER_ERROR": 5120,
"GITLAB_OAUTH_PROVIDER_ERROR": 5121,

# Reset Password
"INVALID_PASSWORD_TOKEN": 5125,
"EXPIRED_PASSWORD_TOKEN": 5130,

@@ -25,23 +25,24 @@ class GitHubOAuthProvider(OauthAdapter):

organization_scope = "read:org"


def __init__(self, request, code=None, state=None, callback=None):
GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET, GITHUB_ORGANIZATION_ID = get_configuration_value(
[
{
"key": "GITHUB_CLIENT_ID",
"default": os.environ.get("GITHUB_CLIENT_ID"),
},
{
"key": "GITHUB_CLIENT_SECRET",
"default": os.environ.get("GITHUB_CLIENT_SECRET"),
},
{
"key": "GITHUB_ORGANIZATION_ID",
"default": os.environ.get("GITHUB_ORGANIZATION_ID"),
},
]
GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET, GITHUB_ORGANIZATION_ID = (
get_configuration_value(
[
{
"key": "GITHUB_CLIENT_ID",
"default": os.environ.get("GITHUB_CLIENT_ID"),
},
{
"key": "GITHUB_CLIENT_SECRET",
"default": os.environ.get("GITHUB_CLIENT_SECRET"),
},
{
"key": "GITHUB_ORGANIZATION_ID",
"default": os.environ.get("GITHUB_ORGANIZATION_ID"),
},
]
)
)

if not (GITHUB_CLIENT_ID and GITHUB_CLIENT_SECRET):
@@ -128,7 +129,10 @@ class GitHubOAuthProvider(OauthAdapter):

def is_user_in_organization(self, github_username):
headers = {"Authorization": f"Bearer {self.token_data.get('access_token')}"}
response = requests.get(f"{self.org_membership_url}/{self.organization_id}/memberships/{github_username}", headers=headers)
response = requests.get(
f"{self.org_membership_url}/{self.organization_id}/memberships/{github_username}",
headers=headers,
)
return response.status_code == 200 # 200 means the user is a member

def set_user_data(self):
@@ -145,7 +149,6 @@ class GitHubOAuthProvider(OauthAdapter):
error_message="GITHUB_USER_NOT_IN_ORG",
)


email = self.__get_email(headers=headers)
super().set_user_data(
{

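Note: is_user_in_organization above treats any HTTP 200 from the org-membership endpoint as membership. A standalone sketch of that check (the URL shape follows the GitHub REST API; the token, org, and username values are placeholders):

# Sketch: check GitHub org membership; 200 means member, 404 means not.
# Placeholders: supply a real token, org login, and username.
import requests

def is_user_in_organization(token: str, org: str, username: str) -> bool:
    response = requests.get(
        f"https://api.github.com/orgs/{org}/memberships/{username}",
        headers={"Authorization": f"Bearer {token}"},
        timeout=10,
    )
    return response.status_code == 200
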
@@ -42,11 +42,11 @@ urlpatterns = [
# credentials
path("sign-in/", SignInAuthEndpoint.as_view(), name="sign-in"),
path("sign-up/", SignUpAuthEndpoint.as_view(), name="sign-up"),
path("spaces/sign-in/", SignInAuthSpaceEndpoint.as_view(), name="sign-in"),
path("spaces/sign-up/", SignUpAuthSpaceEndpoint.as_view(), name="sign-in"),
path("spaces/sign-in/", SignInAuthSpaceEndpoint.as_view(), name="space-sign-in"),
path("spaces/sign-up/", SignUpAuthSpaceEndpoint.as_view(), name="space-sign-up"),
# signout
path("sign-out/", SignOutAuthEndpoint.as_view(), name="sign-out"),
path("spaces/sign-out/", SignOutAuthSpaceEndpoint.as_view(), name="sign-out"),
path("spaces/sign-out/", SignOutAuthSpaceEndpoint.as_view(), name="space-sign-out"),
# csrf token
path("get-csrf-token/", CSRFTokenEndpoint.as_view(), name="get_csrf_token"),
# Magic sign in
@@ -56,17 +56,17 @@ urlpatterns = [
path(
"spaces/magic-generate/",
MagicGenerateSpaceEndpoint.as_view(),
name="magic-generate",
name="space-magic-generate",
),
path(
"spaces/magic-sign-in/",
MagicSignInSpaceEndpoint.as_view(),
name="magic-sign-in",
name="space-magic-sign-in",
),
path(
"spaces/magic-sign-up/",
MagicSignUpSpaceEndpoint.as_view(),
name="magic-sign-up",
name="space-magic-sign-up",
),
## Google Oauth
path("google/", GoogleOauthInitiateEndpoint.as_view(), name="google-initiate"),
@@ -74,12 +74,12 @@ urlpatterns = [
path(
"spaces/google/",
GoogleOauthInitiateSpaceEndpoint.as_view(),
name="google-initiate",
name="space-google-initiate",
),
path(
"google/callback/",
"spaces/google/callback/",
GoogleCallbackSpaceEndpoint.as_view(),
name="google-callback",
name="space-google-callback",
),
## Github Oauth
path("github/", GitHubOauthInitiateEndpoint.as_view(), name="github-initiate"),
@@ -87,12 +87,12 @@ urlpatterns = [
path(
"spaces/github/",
GitHubOauthInitiateSpaceEndpoint.as_view(),
name="github-initiate",
name="space-github-initiate",
),
path(
"spaces/github/callback/",
GitHubCallbackSpaceEndpoint.as_view(),
name="github-callback",
name="space-github-callback",
),
## Gitlab Oauth
path("gitlab/", GitLabOauthInitiateEndpoint.as_view(), name="gitlab-initiate"),
@@ -100,12 +100,12 @@ urlpatterns = [
path(
"spaces/gitlab/",
GitLabOauthInitiateSpaceEndpoint.as_view(),
name="gitlab-initiate",
name="space-gitlab-initiate",
),
path(
"spaces/gitlab/callback/",
GitLabCallbackSpaceEndpoint.as_view(),
name="gitlab-callback",
name="space-gitlab-callback",
),
# Email Check
path("email-check/", EmailCheckEndpoint.as_view(), name="email-check"),
@@ -120,12 +120,12 @@ urlpatterns = [
path(
"spaces/forgot-password/",
ForgotPasswordSpaceEndpoint.as_view(),
name="forgot-password",
name="space-forgot-password",
),
path(
"spaces/reset-password/<uidb64>/<token>/",
ResetPasswordSpaceEndpoint.as_view(),
name="forgot-password",
name="space-forgot-password",
),
path("change-password/", ChangePasswordEndpoint.as_view(), name="forgot-password"),
path("set-password/", SetUserPasswordEndpoint.as_view(), name="set-password"),

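Note: the renames above matter because Django resolves URLs by name; when two routes share a name, as the app and space "sign-in" routes did, reverse() can only ever return the later registration. With distinct names both become addressable. A sketch (it assumes a configured Django project with these urlpatterns loaded; the resulting paths shown are illustrative):

# Sketch: with unique route names, both app and space URLs are reversible.
from django.urls import reverse

app_url = reverse("sign-in")          # e.g. "/sign-in/"
space_url = reverse("space-sign-in")  # e.g. "/spaces/sign-in/"
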
@@ -1,30 +1,53 @@
# Django imports
from django.conf import settings
from django.http import HttpRequest

# Third party imports
from rest_framework.request import Request

# Module imports
from plane.utils.ip_address import get_client_ip

def base_host(request: Request | HttpRequest, is_admin: bool = False, is_space: bool = False, is_app: bool = False) -> str:

def base_host(
request: Request | HttpRequest,
is_admin: bool = False,
is_space: bool = False,
is_app: bool = False,
) -> str:
"""Utility function to return host / origin from the request"""
# Calculate the base origin from request
base_origin = settings.WEB_URL or settings.APP_BASE_URL

# Admin redirections
# Admin redirection
if is_admin:
if settings.ADMIN_BASE_URL:
return settings.ADMIN_BASE_URL
else:
return base_origin + "/god-mode/"
admin_base_path = getattr(settings, "ADMIN_BASE_PATH", None)
if not isinstance(admin_base_path, str):
admin_base_path = "/god-mode/"
if not admin_base_path.startswith("/"):
admin_base_path = "/" + admin_base_path
if not admin_base_path.endswith("/"):
admin_base_path += "/"

# Space redirections
if is_space:
if settings.SPACE_BASE_URL:
return settings.SPACE_BASE_URL
if settings.ADMIN_BASE_URL:
return settings.ADMIN_BASE_URL + admin_base_path
else:
return base_origin + "/spaces/"
return base_origin + admin_base_path

# Space redirection
if is_space:
space_base_path = getattr(settings, "SPACE_BASE_PATH", None)
if not isinstance(space_base_path, str):
space_base_path = "/spaces/"
if not space_base_path.startswith("/"):
space_base_path = "/" + space_base_path
if not space_base_path.endswith("/"):
space_base_path += "/"

if settings.SPACE_BASE_URL:
return settings.SPACE_BASE_URL + space_base_path
else:
return base_origin + space_base_path

# App Redirection
if is_app:

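Note: the rewrite above replaces hard-coded "/god-mode/" and "/spaces/" suffixes with settings-driven paths, normalized to always carry leading and trailing slashes. The normalization logic in isolation, as a runnable sketch:

# Sketch of the path normalization used for ADMIN_BASE_PATH / SPACE_BASE_PATH:
# fall back to a default, then force exactly one leading and trailing slash.
def normalize_base_path(value, default: str) -> str:
    path = value if isinstance(value, str) else default
    if not path.startswith("/"):
        path = "/" + path
    if not path.endswith("/"):
        path += "/"
    return path

assert normalize_base_path(None, "/god-mode/") == "/god-mode/"
assert normalize_base_path("admin", "/god-mode/") == "/admin/"
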
@@ -6,6 +6,7 @@ from django.conf import settings
from plane.utils.host import base_host
from plane.utils.ip_address import get_client_ip


def user_login(request, user, is_app=False, is_admin=False, is_space=False):
login(request=request, user=user)


@@ -21,6 +21,7 @@ from plane.authentication.adapter.error import (
)
from plane.utils.path_validator import validate_next_path


class SignInAuthEndpoint(View):
def post(self, request):
next_path = request.POST.get("next_path")

@@ -18,6 +18,7 @@ from plane.authentication.adapter.error import (
)
from plane.utils.path_validator import validate_next_path


class GitHubOauthInitiateEndpoint(View):
def get(self, request):
# Get host and next path

@@ -18,6 +18,7 @@ from plane.authentication.adapter.error import (
)
from plane.utils.path_validator import validate_next_path


class GitLabOauthInitiateEndpoint(View):
def get(self, request):
# Get host and next path

@@ -20,6 +20,7 @@ from plane.authentication.adapter.error import (
)
from plane.utils.path_validator import validate_next_path


class GoogleOauthInitiateEndpoint(View):
def get(self, request):
request.session["host"] = base_host(request=request, is_app=True)
@@ -95,7 +96,9 @@ class GoogleCallbackEndpoint(View):
# Get the redirection path
path = get_redirection_path(user=user)
# redirect to referer path
url = urljoin(base_host, str(validate_next_path(next_path)) if next_path else path)
url = urljoin(
base_host, str(validate_next_path(next_path)) if next_path else path
)
return HttpResponseRedirect(url)
except AuthenticationException as e:
params = e.get_error_dict()

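Note: the callback joins base_host with validate_next_path(next_path) before redirecting; the validator exists so an attacker-supplied next_path cannot turn the callback into an open redirect. A minimal sketch of such a guard (the real validate_next_path lives in plane.utils.path_validator and may differ):

# Sketch: accept only same-site relative paths as post-login redirects.
# The real plane.utils.path_validator.validate_next_path may differ.
from urllib.parse import urlparse

def validate_next_path(next_path: str | None, fallback: str = "/") -> str:
    path = next_path or ""
    parsed = urlparse(path)
    # Reject absolute URLs ("https://evil.example") and scheme-relative
    # ones ("//evil.example"); keep plain in-site paths like "/projects/".
    if parsed.scheme or parsed.netloc or not path.startswith("/"):
        return fallback
    return path
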
@@ -53,12 +53,14 @@ class ChangePasswordEndpoint(APIView):
error_message="MISSING_PASSWORD",
payload={"error": "Old password is missing"},
)
return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)
return Response(
exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST
)

# Get the new password
new_password = request.data.get("new_password", False)

if not new_password:
if not new_password:
exc = AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES["MISSING_PASSWORD"],
error_message="MISSING_PASSWORD",
@@ -66,7 +68,6 @@ class ChangePasswordEndpoint(APIView):
)
return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)


# If the user password is not autoset then we need to check the old passwords
if not user.is_password_autoset and not user.check_password(old_password):
exc = AuthenticationException(

@@ -25,6 +25,7 @@ from plane.authentication.adapter.error import (
)
from plane.utils.path_validator import validate_next_path


class MagicGenerateSpaceEndpoint(APIView):
permission_classes = [AllowAny]

@@ -38,7 +39,6 @@ class MagicGenerateSpaceEndpoint(APIView):
)
return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)


email = request.data.get("email", "").strip().lower()
try:
validate_email(email)

@@ -459,8 +459,37 @@ def analytic_export_task(email, data, slug):

csv_buffer = generate_csv_from_rows(rows)
send_export_email(email, slug, csv_buffer, rows)
logging.getLogger("plane").info("Email sent succesfully.")
logging.getLogger("plane.worker").info("Email sent successfully.")
return
except Exception as e:
log_exception(e)
return


@shared_task
def export_analytics_to_csv_email(data, headers, keys, email, slug):
try:
"""
Prepares a CSV from data and sends it as an email attachment.

Parameters:
- data: List of dictionaries (e.g. from .values())
- headers: List of CSV column headers
- keys: Keys to extract from each data item (dict)
- email: Email address to send to
- slug: Used for the filename
"""
# Prepare rows: header + data rows
rows = [headers]
for item in data:
row = [item.get(key, "") for key in keys]
rows.append(row)

# Generate CSV buffer
csv_buffer = generate_csv_from_rows(rows)

# Send email with CSV attachment
send_export_email(email=email, slug=slug, csv_buffer=csv_buffer, rows=rows)
except Exception as e:
log_exception(e)
return

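Note: export_analytics_to_csv_email builds its rows as a plain list of lists, header first, then one row per dict with missing keys rendered as "". The row-shaping step on its own, with illustrative data:

# Row shaping as in export_analytics_to_csv_email: header row first, then
# one row per item, pulling only the requested keys. Data is illustrative.
headers = ["Project", "Count"]
keys = ["project__name", "count"]
data = [
    {"project__name": "Mobile", "count": 12},
    {"project__name": "Web"},  # missing "count" becomes ""
]

rows = [headers]
for item in data:
    rows.append([item.get(key, "") for key in keys])

assert rows == [["Project", "Count"], ["Mobile", 12], ["Web", ""]]
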
@@ -12,6 +12,7 @@ from plane.db.models import FileAsset, Page, Issue
from plane.utils.exception_logger import log_exception
from plane.settings.storage import S3Storage
from celery import shared_task
from plane.utils.url import normalize_url_path


def get_entity_id_field(entity_type, entity_id):
@@ -67,11 +68,14 @@ def sync_with_external_service(entity_name, description_html):
"description_html": description_html,
"variant": "rich" if entity_name == "PAGE" else "document",
}
response = requests.post(
f"{settings.LIVE_BASE_URL}/convert-document/",
json=data,
headers=None,
)

live_url = settings.LIVE_URL
if not live_url:
return {}

url = normalize_url_path(f"{live_url}/convert-document/")

response = requests.post(url, json=data, headers=None)
if response.status_code == 200:
return response.json()
except requests.RequestException as e:

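Note: the fix routes the live-server call through normalize_url_path and bails out early when LIVE_URL is unset, instead of blindly formatting LIVE_BASE_URL. A helper like this typically collapses the duplicate slashes that naive string joins produce; a sketch of that idea (the real plane.utils.url.normalize_url_path may differ):

# Sketch: collapse duplicated slashes in the path while leaving the
# scheme's "//" intact. The real normalize_url_path may differ.
import re

def normalize_url_path(url: str) -> str:
    return re.sub(r"(?<!:)//+", "/", url)

assert normalize_url_path("https://live.example//convert-document/") == (
    "https://live.example/convert-document/"
)
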
@@ -33,6 +33,7 @@ from plane.db.models import (
Intake,
IntakeIssue,
)
from plane.db.models.intake import SourceType


def create_project(workspace, user_id):
@@ -388,7 +389,7 @@ def create_intake_issues(workspace, project, user_id, intake_issue_count):
if status == 0
else None
),
source="in-app",
source=SourceType.IN_APP,
workspace=workspace,
project=project,
)

@@ -309,7 +309,7 @@ def send_email_notification(
)
msg.attach_alternative(html_content, "text/html")
msg.send()
logging.getLogger("plane").info("Email Sent Successfully")
logging.getLogger("plane.worker").info("Email Sent Successfully")

# Update the logs
EmailNotificationLog.objects.filter(
@@ -325,7 +325,7 @@ def send_email_notification(
release_lock(lock_id=lock_id)
return
else:
logging.getLogger("plane").info("Duplicate email received skipping")
logging.getLogger("plane.worker").info("Duplicate email received skipping")
return
except (Issue.DoesNotExist, User.DoesNotExist):
release_lock(lock_id=lock_id)

@@ -3,34 +3,49 @@ import csv
import io
import json
import zipfile

from typing import List
import boto3
from botocore.client import Config
from uuid import UUID
from datetime import datetime, date

# Third party imports
from celery import shared_task


# Django imports
from django.conf import settings
from django.utils import timezone
from openpyxl import Workbook
from django.db.models import F, Prefetch

from collections import defaultdict

# Module imports
from plane.db.models import ExporterHistory, Issue
from plane.db.models import ExporterHistory, Issue, FileAsset, Label, User, IssueComment
from plane.utils.exception_logger import log_exception


def dateTimeConverter(time):
def dateTimeConverter(time: datetime) -> str | None:
"""
Convert a datetime object to a formatted string.
"""
if time:
return time.strftime("%a, %d %b %Y %I:%M:%S %Z%z")


def dateConverter(time):
def dateConverter(time: date) -> str | None:
"""
Convert a date object to a formatted string.
"""
if time:
return time.strftime("%a, %d %b %Y")


def create_csv_file(data):
def create_csv_file(data: List[List[str]]) -> str:
"""
Create a CSV file from the provided data.
"""
csv_buffer = io.StringIO()
csv_writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)

@@ -41,11 +56,17 @@ def create_csv_file(data):
return csv_buffer.getvalue()


def create_json_file(data):
def create_json_file(data: List[dict]) -> str:
"""
Create a JSON file from the provided data.
"""
return json.dumps(data)


def create_xlsx_file(data):
def create_xlsx_file(data: List[List[str]]) -> bytes:
"""
Create an XLSX file from the provided data.
"""
workbook = Workbook()
sheet = workbook.active

@@ -58,7 +79,10 @@ def create_xlsx_file(data):
return xlsx_buffer.getvalue()


def create_zip_file(files):
def create_zip_file(files: List[tuple[str, str | bytes]]) -> io.BytesIO:
"""
Create a ZIP file from the provided files.
"""
zip_buffer = io.BytesIO()
with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zipf:
for filename, file_content in files:
@@ -68,7 +92,13 @@ def create_zip_file(files):
return zip_buffer


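Note: create_zip_file gains type hints only; behaviour is unchanged. For reference, the in-memory ZIP pattern it relies on, as a standalone snippet:

# Standalone version of the in-memory ZIP pattern: each (filename, payload)
# pair becomes an entry; the BytesIO buffer can then be uploaded or emailed.
import io
import zipfile

def create_zip(files: list[tuple[str, str | bytes]]) -> io.BytesIO:
    zip_buffer = io.BytesIO()
    with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zipf:
        for filename, file_content in files:
            zipf.writestr(filename, file_content)
    zip_buffer.seek(0)  # rewind so a subsequent read starts at byte 0
    return zip_buffer

buffer = create_zip([("export.csv", "id,name\n1,Demo\n")])
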
def upload_to_s3(zip_file, workspace_id, token_id, slug):
# TODO: Change the upload_to_s3 function to use the new storage method with entry in file asset table
def upload_to_s3(
zip_file: io.BytesIO, workspace_id: UUID, token_id: str, slug: str
) -> None:
"""
Upload a ZIP file to S3 and generate a presigned URL.
"""
file_name = (
f"{workspace_id}/export-{slug}-{token_id[:6]}-{str(timezone.now().date())}.zip"
)
@@ -150,75 +180,85 @@ def upload_to_s3(zip_file, workspace_id, token_id, slug):
exporter_instance.save(update_fields=["status", "url", "key"])


def generate_table_row(issue):
def generate_table_row(issue: dict) -> List[str]:
"""
Generate a table row from an issue dictionary.
"""
return [
f"""{issue["project__identifier"]}-{issue["sequence_id"]}""",
issue["project__name"],
f"""{issue["project_identifier"]}-{issue["sequence_id"]}""",
issue["project_name"],
issue["name"],
issue["description_stripped"],
issue["state__name"],
issue["description"],
issue["state_name"],
dateConverter(issue["start_date"]),
dateConverter(issue["target_date"]),
issue["priority"],
(
f"{issue['created_by__first_name']} {issue['created_by__last_name']}"
if issue["created_by__first_name"] and issue["created_by__last_name"]
else ""
),
(
f"{issue['assignees__first_name']} {issue['assignees__last_name']}"
if issue["assignees__first_name"] and issue["assignees__last_name"]
else ""
),
issue["labels__name"] if issue["labels__name"] else "",
issue["issue_cycle__cycle__name"],
dateConverter(issue["issue_cycle__cycle__start_date"]),
dateConverter(issue["issue_cycle__cycle__end_date"]),
issue["issue_module__module__name"],
dateConverter(issue["issue_module__module__start_date"]),
dateConverter(issue["issue_module__module__target_date"]),
issue["created_by"],
", ".join(issue["labels"]) if issue["labels"] else "",
issue["cycle_name"],
issue["cycle_start_date"],
issue["cycle_end_date"],
", ".join(issue.get("module_name", "")) if issue.get("module_name") else "",
dateTimeConverter(issue["created_at"]),
dateTimeConverter(issue["updated_at"]),
dateTimeConverter(issue["completed_at"]),
dateTimeConverter(issue["archived_at"]),
(
", ".join(
[
f"{comment['comment']} ({comment['created_at']} by {comment['created_by']})"
for comment in issue["comments"]
]
)
if issue["comments"]
else ""
),
issue["estimate"] if issue["estimate"] else "",
", ".join(issue["link"]) if issue["link"] else "",
", ".join(issue["assignees"]) if issue["assignees"] else "",
issue["subscribers_count"] if issue["subscribers_count"] else "",
issue["attachment_count"] if issue["attachment_count"] else "",
", ".join(issue["attachment_links"]) if issue["attachment_links"] else "",
]


def generate_json_row(issue):
def generate_json_row(issue: dict) -> dict:
"""
Generate a JSON row from an issue dictionary.
"""
return {
"ID": f"""{issue["project__identifier"]}-{issue["sequence_id"]}""",
"Project": issue["project__name"],
"ID": f"""{issue["project_identifier"]}-{issue["sequence_id"]}""",
"Project": issue["project_name"],
"Name": issue["name"],
"Description": issue["description_stripped"],
"State": issue["state__name"],
"Description": issue["description"],
"State": issue["state_name"],
"Start Date": dateConverter(issue["start_date"]),
"Target Date": dateConverter(issue["target_date"]),
"Priority": issue["priority"],
"Created By": (
f"{issue['created_by__first_name']} {issue['created_by__last_name']}"
if issue["created_by__first_name"] and issue["created_by__last_name"]
else ""
),
"Assignee": (
f"{issue['assignees__first_name']} {issue['assignees__last_name']}"
if issue["assignees__first_name"] and issue["assignees__last_name"]
else ""
),
"Labels": issue["labels__name"] if issue["labels__name"] else "",
"Cycle Name": issue["issue_cycle__cycle__name"],
"Cycle Start Date": dateConverter(issue["issue_cycle__cycle__start_date"]),
"Cycle End Date": dateConverter(issue["issue_cycle__cycle__end_date"]),
"Module Name": issue["issue_module__module__name"],
"Module Start Date": dateConverter(issue["issue_module__module__start_date"]),
"Module Target Date": dateConverter(issue["issue_module__module__target_date"]),
"Created By": (f"{issue['created_by']}" if issue["created_by"] else ""),
"Assignee": issue["assignees"],
"Labels": issue["labels"],
"Cycle Name": issue["cycle_name"],
"Cycle Start Date": issue["cycle_start_date"],
"Cycle End Date": issue["cycle_end_date"],
"Module Name": issue["module_name"],
"Created At": dateTimeConverter(issue["created_at"]),
"Updated At": dateTimeConverter(issue["updated_at"]),
"Completed At": dateTimeConverter(issue["completed_at"]),
"Archived At": dateTimeConverter(issue["archived_at"]),
"Comments": issue["comments"],
"Estimate": issue["estimate"],
"Link": issue["link"],
"Subscribers Count": issue["subscribers_count"],
"Attachment Count": issue["attachment_count"],
"Attachment Links": issue["attachment_links"],
}


def update_json_row(rows, row):
def update_json_row(rows: List[dict], row: dict) -> None:
"""
Update the json row with the new assignee and label.
"""
matched_index = next(
(
index
@@ -247,7 +287,10 @@ def update_json_row(rows, row):
rows.append(row)


def update_table_row(rows, row):
def update_table_row(rows: List[List[str]], row: List[str]) -> None:
"""
Update the table row with the new assignee and label.
"""
matched_index = next(
(index for index, existing_row in enumerate(rows) if existing_row[0] == row[0]),
None,
@@ -269,7 +312,12 @@ def update_table_row(rows, row):
rows.append(row)


def generate_csv(header, project_id, issues, files):
def generate_csv(
header: List[str],
project_id: str,
issues: List[dict],
files: List[tuple[str, str | bytes]],
) -> None:
"""
Generate CSV export for all the passed issues.
"""
@@ -281,7 +329,15 @@ def generate_csv(header, project_id, issues, files):
files.append((f"{project_id}.csv", csv_file))


def generate_json(header, project_id, issues, files):
def generate_json(
header: List[str],
project_id: str,
issues: List[dict],
files: List[tuple[str, str | bytes]],
) -> None:
"""
Generate JSON export for all the passed issues.
"""
rows = []
for issue in issues:
row = generate_json_row(issue)
@@ -290,68 +346,169 @@ def generate_json(header, project_id, issues, files):
files.append((f"{project_id}.json", json_file))


def generate_xlsx(header, project_id, issues, files):
def generate_xlsx(
header: List[str],
project_id: str,
issues: List[dict],
files: List[tuple[str, str | bytes]],
) -> None:
"""
Generate XLSX export for all the passed issues.
"""
rows = [header]
for issue in issues:
row = generate_table_row(issue)

update_table_row(rows, row)
xlsx_file = create_xlsx_file(rows)
files.append((f"{project_id}.xlsx", xlsx_file))


def get_created_by(obj: Issue | IssueComment) -> str:
"""
Get the created by user for the given object.
"""
if obj.created_by:
return f"{obj.created_by.first_name} {obj.created_by.last_name}"
return ""


@shared_task
def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, slug):
def issue_export_task(
provider: str,
workspace_id: UUID,
project_ids: List[str],
token_id: str,
multiple: bool,
slug: str,
):
"""
Export issues from the workspace.
provider (str): The provider to export the issues to csv | json | xlsx.
token_id (str): The export object token id.
multiple (bool): Whether to export the issues to multiple files per project.
"""
try:
exporter_instance = ExporterHistory.objects.get(token=token_id)
exporter_instance.status = "processing"
exporter_instance.save(update_fields=["status"])

# Base query to get the issues
workspace_issues = (
(
Issue.objects.filter(
workspace__id=workspace_id,
project_id__in=project_ids,
project__project_projectmember__member=exporter_instance.initiated_by_id,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
)
.select_related("project", "workspace", "state", "parent", "created_by")
.prefetch_related(
"assignees", "labels", "issue_cycle__cycle", "issue_module__module"
)
.values(
"id",
"project__identifier",
"project__name",
"project__id",
"sequence_id",
"name",
"description_stripped",
"priority",
"start_date",
"target_date",
"state__name",
"created_at",
"updated_at",
"completed_at",
"archived_at",
"issue_cycle__cycle__name",
"issue_cycle__cycle__start_date",
"issue_cycle__cycle__end_date",
"issue_module__module__name",
"issue_module__module__start_date",
"issue_module__module__target_date",
"created_by__first_name",
"created_by__last_name",
"assignees__first_name",
"assignees__last_name",
"labels__name",
)
Issue.objects.filter(
workspace__id=workspace_id,
project_id__in=project_ids,
project__project_projectmember__member=exporter_instance.initiated_by_id,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
)
.select_related(
"project",
"workspace",
"state",
"parent",
"created_by",
"estimate_point",
)
.prefetch_related(
"labels",
"issue_cycle__cycle",
"issue_module__module",
"issue_comments",
"assignees",
Prefetch(
"assignees",
queryset=User.objects.only("first_name", "last_name").distinct(),
to_attr="assignee_details",
),
Prefetch(
"labels",
queryset=Label.objects.only("name").distinct(),
to_attr="label_details",
),
"issue_subscribers",
"issue_link",
)
.order_by("project__identifier", "sequence_id")
.distinct()
)
# CSV header

# Get the attachments for the issues
file_assets = FileAsset.objects.filter(
issue_id__in=workspace_issues.values_list("id", flat=True),
entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
).annotate(work_item_id=F("issue_id"), asset_id=F("id"))

# Create a dictionary to store the attachments for the issues
attachment_dict = defaultdict(list)
for asset in file_assets:
attachment_dict[asset.work_item_id].append(asset.asset_id)

# Create a list to store the issues data
issues_data = []

# Iterate over the issues
for issue in workspace_issues:
attachments = attachment_dict.get(issue.id, [])

issue_data = {
"id": issue.id,
"project_identifier": issue.project.identifier,
"project_name": issue.project.name,
"project_id": issue.project.id,
"sequence_id": issue.sequence_id,
"name": issue.name,
"description": issue.description_stripped,
"priority": issue.priority,
"start_date": issue.start_date,
"target_date": issue.target_date,
"state_name": issue.state.name if issue.state else None,
"created_at": issue.created_at,
"updated_at": issue.updated_at,
"completed_at": issue.completed_at,
"archived_at": issue.archived_at,
"module_name": [
module.module.name for module in issue.issue_module.all()
],
"created_by": get_created_by(issue),
"labels": [label.name for label in issue.label_details],
"comments": [
{
"comment": comment.comment_stripped,
"created_at": dateConverter(comment.created_at),
"created_by": get_created_by(comment),
}
for comment in issue.issue_comments.all()
],
"estimate": issue.estimate_point.value
if issue.estimate_point and issue.estimate_point.value
else "",
"link": [link.url for link in issue.issue_link.all()],
|
||||
"assignees": [
|
||||
f"{assignee.first_name} {assignee.last_name}"
|
||||
for assignee in issue.assignee_details
|
||||
],
|
||||
"subscribers_count": issue.issue_subscribers.count(),
|
||||
"attachment_count": len(attachments),
|
||||
"attachment_links": [
|
||||
f"/api/assets/v2/workspaces/{issue.workspace.slug}/projects/{issue.project_id}/issues/{issue.id}/attachments/{asset}/"
|
||||
for asset in attachments
|
||||
],
|
||||
}
|
||||
|
||||
# Get Cycles data for the issue
|
||||
cycle = issue.issue_cycle.last()
|
||||
if cycle:
|
||||
# Update cycle data
|
||||
issue_data["cycle_name"] = cycle.cycle.name
|
||||
issue_data["cycle_start_date"] = dateConverter(cycle.cycle.start_date)
|
||||
issue_data["cycle_end_date"] = dateConverter(cycle.cycle.end_date)
|
||||
else:
|
||||
issue_data["cycle_name"] = ""
|
||||
issue_data["cycle_start_date"] = ""
|
||||
issue_data["cycle_end_date"] = ""
|
||||
|
||||
issues_data.append(issue_data)
|
||||
|
||||
# CSV header
|
||||
header = [
|
||||
"ID",
|
||||
"Project",
|
||||
@@ -362,20 +519,25 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, s
|
||||
"Target Date",
|
||||
"Priority",
|
||||
"Created By",
|
||||
"Assignee",
|
||||
"Labels",
|
||||
"Cycle Name",
|
||||
"Cycle Start Date",
|
||||
"Cycle End Date",
|
||||
"Module Name",
|
||||
"Module Start Date",
|
||||
"Module Target Date",
|
||||
"Created At",
|
||||
"Updated At",
|
||||
"Completed At",
|
||||
"Archived At",
|
||||
"Comments",
|
||||
"Estimate",
|
||||
"Link",
|
||||
"Assignees",
|
||||
"Subscribers Count",
|
||||
"Attachment Count",
|
||||
"Attachment Links",
|
||||
]
|
||||
|
||||
# Map the provider to the function
|
||||
EXPORTER_MAPPER = {
|
||||
"csv": generate_csv,
|
||||
"json": generate_json,
|
||||
@@ -384,8 +546,13 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, s
|
||||
|
||||
files = []
|
||||
if multiple:
|
||||
project_dict = defaultdict(list)
|
||||
for issue in issues_data:
|
||||
project_dict[str(issue["project_id"])].append(issue)
|
||||
|
||||
for project_id in project_ids:
|
||||
issues = workspace_issues.filter(project__id=project_id)
|
||||
issues = project_dict.get(str(project_id), [])
|
||||
|
||||
exporter = EXPORTER_MAPPER.get(provider)
|
||||
if exporter is not None:
|
||||
exporter(header, project_id, issues, files)
|
||||
@@ -393,7 +560,7 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, s
|
||||
else:
|
||||
exporter = EXPORTER_MAPPER.get(provider)
|
||||
if exporter is not None:
|
||||
exporter(header, workspace_id, workspace_issues, files)
|
||||
exporter(header, workspace_id, issues_data, files)
|
||||
|
||||
zip_buffer = create_zip_file(files)
|
||||
upload_to_s3(zip_buffer, workspace_id, token_id, slug)
|
||||
|
||||
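A minimal, self-contained sketch of the dispatch pattern this refactor settles on: issues are grouped per project with a defaultdict, then the provider string selects a generator from a mapper dict. The stub generator and `fake_issues` data here are illustrative only, not part of the repository.

    from collections import defaultdict

    # Stub standing in for generate_csv / generate_json / generate_xlsx.
    def generate_csv(header, project_id, issues, files):
        files.append((f"{project_id}.csv", f"{len(issues)} issues"))

    EXPORTER_MAPPER = {"csv": generate_csv}

    def export(provider, project_ids, issues_data, multiple):
        files = []
        if multiple:
            # Group issues by project so each project gets its own file.
            project_dict = defaultdict(list)
            for issue in issues_data:
                project_dict[str(issue["project_id"])].append(issue)
            for project_id in project_ids:
                issues = project_dict.get(str(project_id), [])
                exporter = EXPORTER_MAPPER.get(provider)
                if exporter is not None:
                    exporter([], project_id, issues, files)
        return files

    # Example: two issues in one project, one in another.
    fake_issues = [{"project_id": "p1"}, {"project_id": "p1"}, {"project_id": "p2"}]
    print(export("csv", ["p1", "p2"], fake_issues, multiple=True))
    # -> [('p1.csv', '2 issues'), ('p2.csv', '1 issues')]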
@@ -63,7 +63,7 @@ def forgot_password(first_name, email, uidb64, token, current_site):
        )
        msg.attach_alternative(html_content, "text/html")
        msg.send()
        logging.getLogger("plane").info("Email sent successfully")
        logging.getLogger("plane.worker").info("Email sent successfully")
        return
    except Exception as e:
        log_exception(e)
@@ -307,6 +307,10 @@ def track_labels(

    # Set of newly added labels
    for added_label in added_labels:
        # validate uuids
        if not is_valid_uuid(added_label):
            continue

        label = Label.objects.get(pk=added_label)
        issue_activities.append(
            IssueActivity(
@@ -327,6 +331,10 @@ def track_labels(

    # Set of dropped labels
    for dropped_label in dropped_labels:
        # validate uuids
        if not is_valid_uuid(dropped_label):
            continue

        label = Label.objects.get(pk=dropped_label)
        issue_activities.append(
            IssueActivity(
@@ -373,6 +381,10 @@ def track_assignees(

    bulk_subscribers = []
    for added_asignee in added_assignees:
        # validate uuids
        if not is_valid_uuid(added_asignee):
            continue

        assignee = User.objects.get(pk=added_asignee)
        issue_activities.append(
            IssueActivity(
@@ -406,6 +418,10 @@ def track_assignees(
    )

    for dropped_assignee in dropped_assginees:
        # validate uuids
        if not is_valid_uuid(dropped_assignee):
            continue

        assignee = User.objects.get(pk=dropped_assignee)
        issue_activities.append(
            IssueActivity(
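The hunks above guard every `Label.objects.get` / `User.objects.get` call behind an `is_valid_uuid` check, so malformed IDs are skipped instead of raising. The helper itself is not shown in this diff; a plausible sketch (hypothetical, not the repository's definition) would be:

    import uuid

    def is_valid_uuid(value) -> bool:
        """Return True if value parses as a UUID, else False."""
        try:
            uuid.UUID(str(value))
            return True
        except (ValueError, TypeError):
            return False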
@@ -466,7 +482,7 @@ def track_estimate_points(
            ),
            old_value=old_estimate.value if old_estimate else None,
            new_value=new_estimate.value if new_estimate else None,
            field="estimate_point",
            field="estimate_" + new_estimate.estimate.type,
            project_id=project_id,
            workspace_id=workspace_id,
            comment="updated the estimate point to ",
@@ -1634,40 +1650,6 @@ def issue_activity(

    # Save all the values to database
    issue_activities_created = IssueActivity.objects.bulk_create(issue_activities)
    # Post the updates to segway for integrations and webhooks
    if len(issue_activities_created):
        for activity in issue_activities_created:
            webhook_activity.delay(
                event=(
                    "issue_comment"
                    if activity.field == "comment"
                    else "intake_issue"
                    if intake
                    else "issue"
                ),
                event_id=(
                    activity.issue_comment_id
                    if activity.field == "comment"
                    else intake
                    if intake
                    else activity.issue_id
                ),
                verb=activity.verb,
                field=(
                    "description" if activity.field == "comment" else activity.field
                ),
                old_value=(
                    activity.old_value if activity.old_value != "" else None
                ),
                new_value=(
                    activity.new_value if activity.new_value != "" else None
                ),
                actor_id=activity.actor_id,
                current_site=origin,
                slug=activity.workspace.slug,
                old_identifier=activity.old_identifier,
                new_identifier=activity.new_identifier,
            )

    if notification:
        notifications.delay(
@@ -53,7 +53,7 @@ def magic_link(email, key, token):
        )
        msg.attach_alternative(html_content, "text/html")
        msg.send()
        logging.getLogger("plane").info("Email sent successfully.")
        logging.getLogger("plane.worker").info("Email sent successfully.")
        return
    except Exception as e:
        log_exception(e)

@@ -80,7 +80,7 @@ def project_add_user_email(current_site, project_member_id, invitor_id):
        # Send the email
        msg.send()
        # Log the success
        logging.getLogger("plane").info("Email sent successfully.")
        logging.getLogger("plane.worker").info("Email sent successfully.")
        return
    except Exception as e:
        log_exception(e)

@@ -76,7 +76,7 @@ def project_invitation(email, project_id, token, current_site, invitor):

        msg.attach_alternative(html_content, "text/html")
        msg.send()
        logging.getLogger("plane").info("Email sent successfully.")
        logging.getLogger("plane.worker").info("Email sent successfully.")
        return
    except (Project.DoesNotExist, ProjectMemberInvite.DoesNotExist):
        return

@@ -58,7 +58,7 @@ def user_activation_email(current_site, user_id):

        msg.attach_alternative(html_content, "text/html")
        msg.send()
        logging.getLogger("plane").info("Email sent successfully.")
        logging.getLogger("plane.worker").info("Email sent successfully.")
        return
    except Exception as e:
        log_exception(e)

@@ -60,7 +60,7 @@ def user_deactivation_email(current_site, user_id):
        # Attach HTML content
        msg.attach_alternative(html_content, "text/html")
        msg.send()
        logging.getLogger("plane").info("Email sent successfully.")
        logging.getLogger("plane.worker").info("Email sent successfully.")
        return
    except Exception as e:
        log_exception(e)
@@ -5,6 +5,7 @@ import logging
import uuid

import requests
from typing import Any, Dict, List, Optional, Union

# Third party imports
from celery import shared_task
@@ -70,150 +71,89 @@ MODEL_MAPPER = {
}


def get_model_data(event, event_id, many=False):
logger = logging.getLogger("plane.worker")


def get_model_data(
    event: str, event_id: Union[str, List[str]], many: bool = False
) -> Dict[str, Any]:
    """
    Retrieve and serialize model data based on the event type.

    Args:
        event (str): The type of event/model to retrieve data for
        event_id (Union[str, List[str]]): The ID or list of IDs of the model instance(s)
        many (bool): Whether to retrieve multiple instances

    Returns:
        Dict[str, Any]: Serialized model data

    Raises:
        ValueError: If serializer is not found for the event
        ObjectDoesNotExist: If model instance is not found
    """
    model = MODEL_MAPPER.get(event)
    if many:
        queryset = model.objects.filter(pk__in=event_id)
    else:
        queryset = model.objects.get(pk=event_id)
    serializer = SERIALIZER_MAPPER.get(event)
    return serializer(queryset, many=many).data
    if model is None:
        raise ValueError(f"Model not found for event: {event}")


@shared_task(
    bind=True,
    autoretry_for=(requests.RequestException,),
    retry_backoff=600,
    max_retries=5,
    retry_jitter=True,
)
def webhook_task(self, webhook, slug, event, event_data, action, current_site):
    try:
        webhook = Webhook.objects.get(id=webhook, workspace__slug=slug)
    if many:
        queryset = model.objects.filter(pk__in=event_id)
    else:
        queryset = model.objects.get(pk=event_id)

        headers = {
            "Content-Type": "application/json",
            "User-Agent": "Autopilot",
            "X-Plane-Delivery": str(uuid.uuid4()),
            "X-Plane-Event": event,
        }
    serializer = SERIALIZER_MAPPER.get(event)
    if serializer is None:
        raise ValueError(f"Serializer not found for event: {event}")

        # # Your secret key
        event_data = (
            json.loads(json.dumps(event_data, cls=DjangoJSONEncoder))
            if event_data is not None
            else None
        )

        action = {
            "POST": "create",
            "PATCH": "update",
            "PUT": "update",
            "DELETE": "delete",
        }.get(action, action)

        payload = {
            "event": event,
            "action": action,
            "webhook_id": str(webhook.id),
            "workspace_id": str(webhook.workspace_id),
            "data": event_data,
        }

        # Use HMAC for generating signature
        if webhook.secret_key:
            hmac_signature = hmac.new(
                webhook.secret_key.encode("utf-8"),
                json.dumps(payload).encode("utf-8"),
                hashlib.sha256,
            )
            signature = hmac_signature.hexdigest()
            headers["X-Plane-Signature"] = signature

        # Send the webhook event
        response = requests.post(webhook.url, headers=headers, json=payload, timeout=30)

        # Log the webhook request
        WebhookLog.objects.create(
            workspace_id=str(webhook.workspace_id),
            webhook=str(webhook.id),
            event_type=str(event),
            request_method=str(action),
            request_headers=str(headers),
            request_body=str(payload),
            response_status=str(response.status_code),
            response_headers=str(response.headers),
            response_body=str(response.text),
            retry_count=str(self.request.retries),
        )

    except Webhook.DoesNotExist:
        return
    except requests.RequestException as e:
        # Log the failed webhook request
        WebhookLog.objects.create(
            workspace_id=str(webhook.workspace_id),
            webhook=str(webhook.id),
            event_type=str(event),
            request_method=str(action),
            request_headers=str(headers),
            request_body=str(payload),
            response_status=500,
            response_headers="",
            response_body=str(e),
            retry_count=str(self.request.retries),
        )
        # Retry logic
        if self.request.retries >= self.max_retries:
            Webhook.objects.filter(pk=webhook.id).update(is_active=False)
            if webhook:
                # send email for the deactivation of the webhook
                send_webhook_deactivation_email(
                    webhook_id=webhook.id,
                    receiver_id=webhook.created_by_id,
                    reason=str(e),
                    current_site=current_site,
                )
                return
        raise requests.RequestException()

    except Exception as e:
        if settings.DEBUG:
            print(e)
        log_exception(e)
        return
    return serializer(queryset, many=many).data
    except ObjectDoesNotExist:
        raise ObjectDoesNotExist(f"No {event} found with id: {event_id}")


@shared_task
def send_webhook_deactivation_email(webhook_id, receiver_id, current_site, reason):
    # Get email configurations
    (
        EMAIL_HOST,
        EMAIL_HOST_USER,
        EMAIL_HOST_PASSWORD,
        EMAIL_PORT,
        EMAIL_USE_TLS,
        EMAIL_USE_SSL,
        EMAIL_FROM,
    ) = get_email_configuration()

    receiver = User.objects.get(pk=receiver_id)
    webhook = Webhook.objects.get(pk=webhook_id)
    subject = "Webhook Deactivated"
    message = f"Webhook {webhook.url} has been deactivated due to failed requests."

    # Send the mail
    context = {
        "email": receiver.email,
        "message": message,
        "webhook_url": f"{current_site}/{str(webhook.workspace.slug)}/settings/webhooks/{str(webhook.id)}",
    }
    html_content = render_to_string(
        "emails/notifications/webhook-deactivate.html", context
    )
    text_content = strip_tags(html_content)
def send_webhook_deactivation_email(
    webhook_id: str, receiver_id: str, current_site: str, reason: str
) -> None:
    """
    Send an email notification when a webhook is deactivated.

    Args:
        webhook_id (str): ID of the deactivated webhook
        receiver_id (str): ID of the user to receive the notification
        current_site (str): Current site URL
        reason (str): Reason for webhook deactivation
    """
    try:
        (
            EMAIL_HOST,
            EMAIL_HOST_USER,
            EMAIL_HOST_PASSWORD,
            EMAIL_PORT,
            EMAIL_USE_TLS,
            EMAIL_USE_SSL,
            EMAIL_FROM,
        ) = get_email_configuration()

        receiver = User.objects.get(pk=receiver_id)
        webhook = Webhook.objects.get(pk=webhook_id)

        # Get the webhook payload
        subject = "Webhook Deactivated"
        message = f"Webhook {webhook.url} has been deactivated due to failed requests."

        # Send the mail
        context = {
            "email": receiver.email,
            "message": message,
            "webhook_url": f"{current_site}/{str(webhook.workspace.slug)}/settings/webhooks/{str(webhook.id)}",
        }
        html_content = render_to_string(
            "emails/notifications/webhook-deactivate.html", context
        )
        text_content = strip_tags(html_content)

        # Set the email connection
        connection = get_connection(
            host=EMAIL_HOST,
            port=int(EMAIL_PORT),
@@ -223,6 +163,7 @@ def send_webhook_deactivation_email(webhook_id, receiver_id, current_site, reaso
            use_ssl=EMAIL_USE_SSL == "1",
        )

        # Create the email message
        msg = EmailMultiAlternatives(
            subject=subject,
            body=text_content,
@@ -232,11 +173,10 @@ def send_webhook_deactivation_email(webhook_id, receiver_id, current_site, reaso
        )
        msg.attach_alternative(html_content, "text/html")
        msg.send()
        logging.getLogger("plane").info("Email sent successfully.")
        return
        logger.info("Email sent successfully.")
    except Exception as e:
        log_exception(e)
        return
        logger.error(f"Failed to send email: {e}")


@shared_task(
@@ -247,10 +187,29 @@ def send_webhook_deactivation_email(webhook_id, receiver_id, current_site, reaso
    retry_jitter=True,
)
def webhook_send_task(
    self, webhook, slug, event, event_data, action, current_site, activity
):
    self,
    webhook_id: str,
    slug: str,
    event: str,
    event_data: Optional[Dict[str, Any]],
    action: str,
    current_site: str,
    activity: Optional[Dict[str, Any]],
) -> None:
    """
    Send webhook notifications to configured endpoints.

    Args:
        webhook_id (str): Webhook ID
        slug (str): Workspace slug
        event (str): Event type
        event_data (Optional[Dict[str, Any]]): Event data to be sent
        action (str): HTTP method/action
        current_site (str): Current site URL
        activity (Optional[Dict[str, Any]]): Activity data
    """
    try:
        webhook = Webhook.objects.get(id=webhook, workspace__slug=slug)
        webhook = Webhook.objects.get(id=webhook_id, workspace__slug=slug)

        headers = {
            "Content-Type": "application/json",
@@ -297,7 +256,12 @@ def webhook_send_task(
        )
        signature = hmac_signature.hexdigest()
        headers["X-Plane-Signature"] = signature
    except Exception as e:
        log_exception(e)
        logger.error(f"Failed to send webhook: {e}")
        return

    try:
        # Send the webhook event
        response = requests.post(webhook.url, headers=headers, json=payload, timeout=30)

@@ -314,7 +278,7 @@ def webhook_send_task(
            response_body=str(response.text),
            retry_count=str(self.request.retries),
        )

        logger.info(f"Webhook {webhook.id} sent successfully")
    except requests.RequestException as e:
        # Log the failed webhook request
        WebhookLog.objects.create(
@@ -329,12 +293,13 @@ def webhook_send_task(
            response_body=str(e),
            retry_count=str(self.request.retries),
        )
        logger.error(f"Webhook {webhook.id} failed with error: {e}")
        # Retry logic
        if self.request.retries >= self.max_retries:
            Webhook.objects.filter(pk=webhook.id).update(is_active=False)
            if webhook:
                # send email for the deactivation of the webhook
                send_webhook_deactivation_email(
                send_webhook_deactivation_email.delay(
                    webhook_id=webhook.id,
                    receiver_id=webhook.created_by_id,
                    reason=str(e),
@@ -344,26 +309,50 @@ def webhook_send_task(
        raise requests.RequestException()

    except Exception as e:
        if settings.DEBUG:
            print(e)
        log_exception(e)
        return


@shared_task
def webhook_activity(
    event,
    verb,
    field,
    old_value,
    new_value,
    actor_id,
    slug,
    current_site,
    event_id,
    old_identifier,
    new_identifier,
):
    event: str,
    verb: str,
    field: Optional[str],
    old_value: Any,
    new_value: Any,
    actor_id: str | uuid.UUID,
    slug: str,
    current_site: str,
    event_id: str | uuid.UUID,
    old_identifier: Optional[str],
    new_identifier: Optional[str],
) -> None:
    """
    Process and send webhook notifications for various activities in the system.

    This task filters relevant webhooks based on the event type and sends notifications
    to all active webhooks for the workspace.

    Args:
        event (str): Type of event (project, issue, module, cycle, issue_comment)
        verb (str): Action performed (created, updated, deleted)
        field (Optional[str]): Name of the field that was changed
        old_value (Any): Previous value of the field
        new_value (Any): New value of the field
        actor_id (str | uuid.UUID): ID of the user who performed the action
        slug (str): Workspace slug
        current_site (str): Current site URL
        event_id (str | uuid.UUID): ID of the event object
        old_identifier (Optional[str]): Previous identifier if any
        new_identifier (Optional[str]): New identifier if any

    Returns:
        None

    Note:
        The function silently returns on ObjectDoesNotExist exceptions to handle
        race conditions where objects might have been deleted.
    """
    try:
        webhooks = Webhook.objects.filter(workspace__slug=slug, is_active=True)

@@ -384,7 +373,7 @@ def webhook_activity(

    for webhook in webhooks:
        webhook_send_task.delay(
            webhook=webhook.id,
            webhook_id=webhook.id,
            slug=slug,
            event=event,
            event_data=(
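Receivers can verify these deliveries because the payload is signed exactly as the hunks above show: HMAC-SHA256 over the JSON body with the webhook's secret key, hex-encoded into the `X-Plane-Signature` header. A minimal consumer-side check, assuming the endpoint sees the raw request body unchanged from the signed serialization (the values below are made up for the demo):

    import hashlib
    import hmac

    def verify_plane_signature(secret_key: str, raw_body: bytes, signature_header: str) -> bool:
        """Recompute HMAC-SHA256 over the raw request body and compare
        it against the X-Plane-Signature header in constant time."""
        expected = hmac.new(
            secret_key.encode("utf-8"), raw_body, hashlib.sha256
        ).hexdigest()
        return hmac.compare_digest(expected, signature_header)

    # Example with made-up values:
    body = b'{"event": "issue", "action": "update"}'
    sig = hmac.new(b"my-secret", body, hashlib.sha256).hexdigest()
    assert verify_plane_signature("my-secret", body, sig)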
apiserver/plane/bgtasks/work_item_link_task.py (new file, 185 lines)
@@ -0,0 +1,185 @@
# Python imports
import logging


# Third party imports
from celery import shared_task
import requests
from bs4 import BeautifulSoup
from urllib.parse import urlparse, urljoin
import base64
import ipaddress
from typing import Any, Dict, Optional

from plane.db.models import IssueLink
from plane.utils.exception_logger import log_exception

logger = logging.getLogger("plane.worker")


DEFAULT_FAVICON = "PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIyNCIgaGVpZ2h0PSIyNCIgdmlld0JveD0iMCAwIDI0IDI0IiBmaWxsPSJub25lIiBzdHJva2U9ImN1cnJlbnRDb2xvciIgc3Ryb2tlLXdpZHRoPSIyIiBzdHJva2UtbGluZWNhcD0icm91bmQiIHN0cm9rZS1saW5lam9pbj0icm91bmQiIGNsYXNzPSJsdWNpZGUgbHVjaWRlLWxpbmstaWNvbiBsdWNpZGUtbGluayI+PHBhdGggZD0iTTEwIDEzYTUgNSAwIDAgMCA3LjU0LjU0bDMtM2E1IDUgMCAwIDAtNy4wNy03LjA3bC0xLjcyIDEuNzEiLz48cGF0aCBkPSJNMTQgMTFhNSA1IDAgMCAwLTcuNTQtLjU0bC0zIDNhNSA1IDAgMCAwIDcuMDcgNy4wN2wxLjcxLTEuNzEiLz48L3N2Zz4="  # noqa: E501


@shared_task
def crawl_work_item_link_title(id: str, url: str) -> None:
    meta_data = crawl_work_item_link_title_and_favicon(url)
    issue_link = IssueLink.objects.get(id=id)

    issue_link.metadata = meta_data

    issue_link.save()


def crawl_work_item_link_title_and_favicon(url: str) -> Dict[str, Any]:
    """
    Crawls a URL to extract the title and favicon.

    Args:
        url (str): The URL to crawl

    Returns:
        Dict[str, Any]: Dictionary containing the title, the base64-encoded
        favicon, and the crawled URLs
    """
    try:
        # Prevent access to private IP ranges
        parsed = urlparse(url)

        try:
            ip = ipaddress.ip_address(parsed.hostname)
        except ValueError:
            # Not an IP address, continue with domain validation
            ip = None
        if ip is not None and (ip.is_private or ip.is_loopback or ip.is_reserved):
            raise ValueError("Access to private/internal networks is not allowed")

        # Set up headers to mimic a real browser
        headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"  # noqa: E501
        }

        # Fetch the main page
        response = requests.get(url, headers=headers, timeout=2)

        response.raise_for_status()

        # Parse HTML
        soup = BeautifulSoup(response.content, "html.parser")

        # Extract title
        title_tag = soup.find("title")
        title = title_tag.get_text().strip() if title_tag else None

        # Fetch and encode favicon
        favicon_base64 = fetch_and_encode_favicon(headers, soup, url)

        # Prepare result
        result = {
            "title": title,
            "favicon": favicon_base64["favicon_base64"],
            "url": url,
            "favicon_url": favicon_base64["favicon_url"],
        }

        return result

    except requests.RequestException as e:
        log_exception(e)
        return {
            "error": f"Request failed: {str(e)}",
            "title": None,
            "favicon": None,
            "url": url,
        }
    except Exception as e:
        log_exception(e)
        return {
            "error": f"Unexpected error: {str(e)}",
            "title": None,
            "favicon": None,
            "url": url,
        }


def find_favicon_url(soup: BeautifulSoup, base_url: str) -> Optional[str]:
    """
    Find the favicon URL from HTML soup.

    Args:
        soup: BeautifulSoup object
        base_url: Base URL for resolving relative paths

    Returns:
        str: Absolute URL to favicon or None
    """
    # Look for various favicon link tags
    favicon_selectors = [
        'link[rel="icon"]',
        'link[rel="shortcut icon"]',
        'link[rel="apple-touch-icon"]',
        'link[rel="apple-touch-icon-precomposed"]',
    ]

    for selector in favicon_selectors:
        favicon_tag = soup.select_one(selector)
        if favicon_tag and favicon_tag.get("href"):
            return urljoin(base_url, favicon_tag["href"])

    # Fallback to /favicon.ico
    parsed_url = urlparse(base_url)
    fallback_url = f"{parsed_url.scheme}://{parsed_url.netloc}/favicon.ico"

    # Check if fallback exists
    try:
        response = requests.head(fallback_url, timeout=2)
        response.raise_for_status()
        if response.status_code == 200:
            return fallback_url
    except requests.RequestException as e:
        log_exception(e)
        return None

    return None


def fetch_and_encode_favicon(
    headers: Dict[str, str], soup: BeautifulSoup, url: str
) -> Optional[Dict[str, str]]:
    """
    Fetch favicon and encode it as base64.

    Args:
        headers: Request headers
        soup: BeautifulSoup object for the fetched page
        url: Page URL used to resolve the favicon location

    Returns:
        Dict[str, str]: Favicon URL and base64-encoded favicon with data URI prefix
    """
    try:
        favicon_url = find_favicon_url(soup, url)
        if favicon_url is None:
            return {
                "favicon_url": None,
                "favicon_base64": f"data:image/svg+xml;base64,{DEFAULT_FAVICON}",
            }

        response = requests.get(favicon_url, headers=headers, timeout=2)
        response.raise_for_status()

        # Get content type
        content_type = response.headers.get("content-type", "image/x-icon")

        # Convert to base64
        favicon_base64 = base64.b64encode(response.content).decode("utf-8")

        # Return as data URI
        return {
            "favicon_url": favicon_url,
            "favicon_base64": f"data:{content_type};base64,{favicon_base64}",
        }

    except Exception as e:
        logger.warning(f"Failed to fetch favicon: {e}")
        return {
            "favicon_url": None,
            "favicon_base64": f"data:image/svg+xml;base64,{DEFAULT_FAVICON}",
        }
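A quick sketch of what the crawler hands back to `IssueLink.metadata` (the call is illustrative and must run inside the Django environment so the module-level imports resolve; the URL is an example only):

    # Hypothetical call; keys come straight from the function above.
    meta = crawl_work_item_link_title_and_favicon("https://example.com")
    # On success:
    # {
    #     "title": "Example Domain",
    #     "favicon": "data:image/png;base64,...",
    #     "url": "https://example.com",
    #     "favicon_url": "https://example.com/favicon.ico",
    # }
    # On failure the dict carries an "error" key with title/favicon set to None.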
@@ -78,7 +78,7 @@ def workspace_invitation(email, workspace_id, token, current_site, inviter):
        )
        msg.attach_alternative(html_content, "text/html")
        msg.send()
        logging.getLogger("plane").info("Email sent successfully")
        logging.getLogger("plane.worker").info("Email sent successfully")
        return
    except (Workspace.DoesNotExist, WorkspaceMemberInvite.DoesNotExist):
        return
apiserver/plane/bgtasks/workspace_seed_task.py (new file, 319 lines)
@@ -0,0 +1,319 @@
# Python imports
import os
import json
import time
import uuid
from typing import Dict
import logging

# Django imports
from django.conf import settings

# Third party imports
from celery import shared_task

# Module imports
from plane.db.models import (
    Workspace,
    WorkspaceMember,
    Project,
    ProjectMember,
    IssueUserProperty,
    State,
    Label,
    Issue,
    IssueLabel,
    IssueSequence,
    IssueActivity,
)

logger = logging.getLogger("plane.worker")


def read_seed_file(filename):
    """
    Read a JSON file from the seed directory.

    Args:
        filename (str): Name of the JSON file to read

    Returns:
        dict: Contents of the JSON file
    """
    file_path = os.path.join(settings.SEED_DIR, "data", filename)
    try:
        with open(file_path, "r") as file:
            return json.load(file)
    except FileNotFoundError:
        logger.error(f"Seed file {filename} not found in {settings.SEED_DIR}/data")
        return None
    except json.JSONDecodeError:
        logger.error(f"Error decoding JSON from {filename}")
        return None


def create_project_and_member(workspace: Workspace) -> Dict[int, uuid.UUID]:
    """Creates a project and associated members for a workspace.

    Creates a new project using the workspace name and sets up all necessary
    member associations and user properties.

    Args:
        workspace: The workspace to create the project in

    Returns:
        A mapping of seed project IDs to actual project IDs
    """
    project_seeds = read_seed_file("projects.json")
    project_identifier = "".join(ch for ch in workspace.name if ch.isalnum())[:5]

    # Create members
    workspace_members = WorkspaceMember.objects.filter(workspace=workspace).values(
        "member_id", "role"
    )

    projects_map: Dict[int, uuid.UUID] = {}

    if not project_seeds:
        logger.warning(
            "Task: workspace_seed_task -> No project seeds found. Skipping project creation."
        )
        return projects_map

    for project_seed in project_seeds:
        project_id = project_seed.pop("id")
        # Remove the name from seed data since we want to use workspace name
        project_seed.pop("name", None)
        project_seed.pop("identifier", None)

        project = Project.objects.create(
            **project_seed,
            workspace=workspace,
            name=workspace.name,  # Use workspace name
            identifier=project_identifier,
            created_by_id=workspace.created_by_id,
        )

        # Create project members
        ProjectMember.objects.bulk_create(
            [
                ProjectMember(
                    project=project,
                    member_id=workspace_member["member_id"],
                    role=workspace_member["role"],
                    workspace_id=workspace.id,
                    created_by_id=workspace.created_by_id,
                )
                for workspace_member in workspace_members
            ]
        )

        # Create issue user properties
        IssueUserProperty.objects.bulk_create(
            [
                IssueUserProperty(
                    project=project,
                    user_id=workspace_member["member_id"],
                    workspace_id=workspace.id,
                    display_filters={
                        "group_by": None,
                        "order_by": "sort_order",
                        "type": None,
                        "sub_issue": True,
                        "show_empty_groups": True,
                        "layout": "list",
                        "calendar_date_range": "",
                    },
                    created_by_id=workspace.created_by_id,
                )
                for workspace_member in workspace_members
            ]
        )
        # update map
        projects_map[project_id] = project.id
        logger.info(f"Task: workspace_seed_task -> Project {project_id} created")

    return projects_map


def create_project_states(
    workspace: Workspace, project_map: Dict[int, uuid.UUID]
) -> Dict[int, uuid.UUID]:
    """Creates states for each project in the workspace.

    Args:
        workspace: The workspace containing the projects
        project_map: Mapping of seed project IDs to actual project IDs

    Returns:
        A mapping of seed state IDs to actual state IDs
    """

    state_seeds = read_seed_file("states.json")
    state_map: Dict[int, uuid.UUID] = {}

    if not state_seeds:
        return state_map

    for state_seed in state_seeds:
        state_id = state_seed.pop("id")
        project_id = state_seed.pop("project_id")

        state = State.objects.create(
            **state_seed,
            project_id=project_map[project_id],
            workspace=workspace,
            created_by_id=workspace.created_by_id,
        )

        state_map[state_id] = state.id
        logger.info(f"Task: workspace_seed_task -> State {state_id} created")
    return state_map


def create_project_labels(
    workspace: Workspace, project_map: Dict[int, uuid.UUID]
) -> Dict[int, uuid.UUID]:
    """Creates labels for each project in the workspace.

    Args:
        workspace: The workspace containing the projects
        project_map: Mapping of seed project IDs to actual project IDs

    Returns:
        A mapping of seed label IDs to actual label IDs
    """
    label_seeds = read_seed_file("labels.json")
    label_map: Dict[int, uuid.UUID] = {}

    if not label_seeds:
        return label_map

    for label_seed in label_seeds:
        label_id = label_seed.pop("id")
        project_id = label_seed.pop("project_id")
        label = Label.objects.create(
            **label_seed,
            project_id=project_map[project_id],
            workspace=workspace,
            created_by_id=workspace.created_by_id,
        )
        label_map[label_id] = label.id

        logger.info(f"Task: workspace_seed_task -> Label {label_id} created")
    return label_map


def create_project_issues(
    workspace: Workspace,
    project_map: Dict[int, uuid.UUID],
    states_map: Dict[int, uuid.UUID],
    labels_map: Dict[int, uuid.UUID],
) -> None:
    """Creates issues and their associated records for each project.

    Creates issues along with their sequences, activities, and label associations.

    Args:
        workspace: The workspace containing the projects
        project_map: Mapping of seed project IDs to actual project IDs
        states_map: Mapping of seed state IDs to actual state IDs
        labels_map: Mapping of seed label IDs to actual label IDs
    """
    issue_seeds = read_seed_file("issues.json")

    if not issue_seeds:
        return

    for issue_seed in issue_seeds:
        required_fields = ["id", "labels", "project_id", "state_id"]
        # skip this seed entirely if any required field is missing
        missing_fields = [f for f in required_fields if f not in issue_seed]
        if missing_fields:
            logger.error(
                f"Task: workspace_seed_task -> Required fields {missing_fields} missing in issue seed"
            )
            continue

        # get the values
        issue_id = issue_seed.pop("id")
        labels = issue_seed.pop("labels")
        project_id = issue_seed.pop("project_id")
        state_id = issue_seed.pop("state_id")

        issue = Issue.objects.create(
            **issue_seed,
            state_id=states_map[state_id],
            project_id=project_map[project_id],
            workspace=workspace,
            created_by_id=workspace.created_by_id,
        )
        IssueSequence.objects.create(
            issue=issue,
            project_id=project_map[project_id],
            workspace_id=workspace.id,
            created_by_id=workspace.created_by_id,
        )

        IssueActivity.objects.create(
            issue=issue,
            project_id=project_map[project_id],
            workspace_id=workspace.id,
            comment="created the issue",
            verb="created",
            actor_id=workspace.created_by_id,
            epoch=time.time(),
        )

        for label_id in labels:
            IssueLabel.objects.create(
                issue=issue,
                label_id=labels_map[label_id],
                project_id=project_map[project_id],
                workspace_id=workspace.id,
                created_by_id=workspace.created_by_id,
            )

        logger.info(f"Task: workspace_seed_task -> Issue {issue_id} created")
    return


@shared_task
def workspace_seed(workspace_id: uuid.UUID) -> None:
    """Seeds a new workspace with initial project data.

    Creates a complete workspace setup including:
    - Projects and project members
    - Project states
    - Project labels
    - Issues and their associations

    Args:
        workspace_id: ID of the workspace to seed
    """
    try:
        logger.info(f"Task: workspace_seed_task -> Seeding workspace {workspace_id}")
        # Get the workspace
        workspace = Workspace.objects.get(id=workspace_id)

        # Create a project with the same name as workspace
        project_map = create_project_and_member(workspace)

        # Create project states
        state_map = create_project_states(workspace, project_map)

        # Create project labels
        label_map = create_project_labels(workspace, project_map)

        # create project issues
        create_project_issues(workspace, project_map, state_map, label_map)

        logger.info(
            f"Task: workspace_seed_task -> Workspace {workspace_id} seeded successfully"
        )
        return
    except Exception as e:
        logger.error(
            f"Task: workspace_seed_task -> Failed to seed workspace {workspace_id}: {str(e)}"
        )
        raise e
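The seed files this task reads are plain JSON lists keyed by small integer IDs, which the `*_map` dictionaries then translate into real UUIDs so states, labels, and issues can reference their project. A hypothetical `projects.json` entry (field names beyond `id` are illustrative, not the repository's actual seed data):

    # Hypothetical seed data, shaped the way create_project_and_member consumes it:
    project_seeds = [
        {
            "id": 1,  # seed ID, remapped to a real UUID in projects_map
            "description": "Your first project",
            "network": 2,
        },
    ]
    # "name" and "identifier" are popped/overridden anyway, so seeds need not carry them.

    # Kicking off the whole pipeline is a single Celery call:
    # workspace_seed.delay(workspace.id)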
@@ -1,8 +1,16 @@
# Python imports
import os
import logging

# Third party imports
from celery import Celery
from plane.settings.redis import redis_instance
from pythonjsonlogger.jsonlogger import JsonFormatter
from celery.signals import after_setup_logger, after_setup_task_logger
from celery.schedules import crontab

# Module imports
from plane.settings.redis import redis_instance

# Set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")

@@ -47,6 +55,28 @@ app.conf.beat_schedule = {
    },
}


# Setup logging
@after_setup_logger.connect
def setup_loggers(logger, *args, **kwargs):
    formatter = JsonFormatter(
        "%(levelname)s %(asctime)s %(module)s %(name)s %(message)s"
    )
    handler = logging.StreamHandler()
    handler.setFormatter(fmt=formatter)
    logger.addHandler(handler)


@after_setup_task_logger.connect
def setup_task_loggers(logger, *args, **kwargs):
    formatter = JsonFormatter(
        "%(levelname)s %(asctime)s %(module)s %(name)s %(message)s"
    )
    handler = logging.StreamHandler()
    handler.setFormatter(fmt=formatter)
    logger.addHandler(handler)


# Load task modules from all registered Django app configs.
app.autodiscover_tasks()
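With that format string, python-json-logger lifts the named fields into JSON keys, so the worker log lines added throughout this diff come out as structured records. A standalone demo of the same wiring (output values are illustrative):

    import logging
    from pythonjsonlogger.jsonlogger import JsonFormatter

    logger = logging.getLogger("plane.worker")
    handler = logging.StreamHandler()
    handler.setFormatter(
        JsonFormatter("%(levelname)s %(asctime)s %(module)s %(name)s %(message)s")
    )
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)

    logger.info("Email sent successfully.")
    # -> {"levelname": "INFO", "asctime": "...", "module": "...",
    #     "name": "plane.worker", "message": "Email sent successfully."}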
@@ -5,7 +5,9 @@ from plane.db.models import Workspace


class Command(BaseCommand):
    help = "Updates the slug of a soft-deleted workspace by appending the epoch timestamp"
    help = (
        "Updates the slug of a soft-deleted workspace by appending the epoch timestamp"
    )

    def add_arguments(self, parser):
        parser.add_argument(
@@ -75,4 +77,4 @@ class Command(BaseCommand):
                    self.style.ERROR(
                        f"Error updating workspace '{workspace.name}': {str(e)}"
                    )
                )
            )
@@ -0,0 +1,31 @@
# Generated by Django 4.2.17 on 2025-03-04 19:29

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid


class Migration(migrations.Migration):

    dependencies = [
        ("db", "0092_alter_deprecateddashboardwidget_unique_together_and_more"),
    ]

    operations = [
        migrations.AddField(
            model_name="page",
            name="moved_to_page",
            field=models.UUIDField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name="page",
            name="moved_to_project",
            field=models.UUIDField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name="pageversion",
            name="sub_pages_data",
            field=models.JSONField(blank=True, default=dict),
        ),
    ]
apiserver/plane/db/migrations/0094_auto_20250425_0902.py (new file, 25 lines)
@@ -0,0 +1,25 @@
# Generated by Django 4.2.17 on 2025-04-25 09:02

from django.db import migrations, models
from plane.db.models.intake import SourceType


def set_default_source_type(apps, schema_editor):
    IntakeIssue = apps.get_model("db", "IntakeIssue")
    IntakeIssue.objects.filter(source__iexact="in-app").update(source=SourceType.IN_APP)


class Migration(migrations.Migration):
    dependencies = [
        ('db', '0093_page_moved_to_page_page_moved_to_project_and_more'),
    ]

    operations = [
        migrations.RunPython(
            set_default_source_type,
            migrations.RunPython.noop,
        ),
        migrations.AddField(
            model_name='profile',
            name='start_of_the_week',
            field=models.PositiveSmallIntegerField(choices=[(0, 'Sunday'), (1, 'Monday'), (2, 'Tuesday'), (3, 'Wednesday'), (4, 'Thursday'), (5, 'Friday'), (6, 'Saturday')], default=0),
        ),
    ]

@@ -0,0 +1,23 @@
# Generated by Django 4.2.14 on 2025-05-09 11:31

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('db', '0094_auto_20250425_0902'),
    ]

    operations = [
        migrations.AddField(
            model_name='page',
            name='external_id',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='page',
            name='external_source',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
    ]
Some files were not shown because too many files have changed in this diff.