mirror of https://github.com/makeplane/plane
synced 2025-08-07 19:59:33 +00:00

Compare commits: fix-sideba...enum-to-ob (154 commits)
Commit SHAs in this range:

409b2b78f1 ebc2bdcd3a 11b222ece8 c1a078ef3f ad11a34efc 9c28db8b7b 32d5fea3d3
6adc721b34 531748dcc3 9965f48ba7 d15d7549f7 8fcffd2338 07e937cd8e 1f1b421735
5a43ec8411 c86e7e02bc d91d7a2f60 b3b285b1e5 11debee402 1608e4f122 edeeee1227
9ff238816b 6bd5caf008 c021aff58f 683be55883 970ce8cf26 cbbe1a4e4d 6a74677cc9
f6ea4f931d 950fcfdb40 053c895120 245167e8aa 6be3f0ea73 14d2d69120 570a9e319e
469a027bb6 8c99a7df88 f34f078bd2 0fe2549bc6 118964de01 9f37f1ef0e 986f29d1f2
1113f9fc19 ef3ec7274c a0a45b7916 2792d48288 b2ccca0567 2e822b38e4 e570fe404f
48b613ae66 e70105235b 7766e8b5cf 16d63abcdc 0568b8d583 64da29b0d9 7c336a65c4
2242a85e5c 323920a358 151fc8389e 0f828fd5e0 67cbe94d4a 322af8c436 41c2aefad4
445c819fbd 046a8a1bcf 099a1cc12b a0a697401b cb92108bf4 01b685ea57 b16a585102
4a97d7c28c 141cb17e8a 26b62c4a70 e388a9a279 a3a580923c b4bc49971c 04c7c53e09
78cc32765b 4e485d6402 5a208cb1b9 0eafbb698a 193ae9bfc8 7cb5a9120a 84fc81dd98
2d0c0c7f8a 5c9bdb1cea f8ca1e46b1 a3b9152a9b 5223bd01e8 6eb0b5ddb0 cd200169b6
037bb88b53 643390e723 731c4e8fcd 6216ad77f4 9812129ad3 5226b17f90 b376e5300a
4460529b37 0a8cc24da5 2f4aa843fc cfac8ce350 75a11ba31a 1fc3709731 7e21618762
2d475491e9 2a2feaf88e e48b2da623 9c9952a823 906ce8b500 6c483fad2f 5b776392bd
ba158d5d6e 084cc75726 534f5c7dd0 080cf70e3f 4c3f7f27a5 803f6cc62a 3a6d0c11fb
75d81f9e95 0d5c7c6653 079c3a3a99 5f8d5ea388 8613a80b16 dc16f2862e e68d344410
26c8cba322 b435ceedfc 13c46e0fdf 02bccb44d6 b5634f5fa1 64aae0a2ac a263bfc01f
50082f0843 30db59534d e401c9d6e4 39b5736c83 2785419d12 ac5b974d67 14ebaf0799
7cdb622663 855e4a3218 d456767492 6faff1d556 bc2936dcd3 d366ac1581 0a01e0eb41
b4cc2d83fe 42e2b787f0 461e099bbc 45e25ce18b 4d88dbaf49 e61ff879c4 adeb7d977d
@@ -2,6 +2,7 @@
 *.pyc
 .env
 venv
 .venv
 node_modules/
+**/node_modules/
 npm-debug.log
@@ -14,4 +15,4 @@ build/
 out/
+**/out/
 dist/
 **/dist/
-**/dist/
.github/workflows/build-branch.yml | 1 (vendored)
@@ -290,5 +290,6 @@ jobs:
           ${{ github.workspace }}/deploy/selfhost/setup.sh
           ${{ github.workspace }}/deploy/selfhost/swarm.sh
           ${{ github.workspace }}/deploy/selfhost/restore.sh
+          ${{ github.workspace }}/deploy/selfhost/restore-airgapped.sh
           ${{ github.workspace }}/deploy/selfhost/docker-compose.yml
           ${{ github.workspace }}/deploy/selfhost/variables.env
.gitignore | 2 (vendored)
@@ -53,6 +53,8 @@ mediafiles
 .env
 .DS_Store
 logs/
+htmlcov/
+.coverage

 node_modules/
 assets/dist/
@@ -69,14 +69,14 @@ chmod +x setup.sh
 docker compose -f docker-compose-local.yml up
 ```

-5. Start web apps:
+4. Start web apps:

 ```bash
 yarn dev
 ```

-6. Open your browser to http://localhost:3001/god-mode/ and register yourself as instance admin
-7. Open up your browser to http://localhost:3000 then log in using the same credentials from the previous step
+5. Open your browser to http://localhost:3001/god-mode/ and register yourself as instance admin
+6. Open up your browser to http://localhost:3000 then log in using the same credentials from the previous step

 That’s it! You’re all set to begin coding. Remember to refresh your browser if changes don’t auto-reload. Happy contributing! 🎉
@@ -26,16 +26,16 @@ export const InstanceAIForm: FC<IInstanceAIForm> = (props) => {
     formState: { errors, isSubmitting },
   } = useForm<AIFormValues>({
     defaultValues: {
-      OPENAI_API_KEY: config["OPENAI_API_KEY"],
-      GPT_ENGINE: config["GPT_ENGINE"],
+      LLM_API_KEY: config["LLM_API_KEY"],
+      LLM_MODEL: config["LLM_MODEL"],
     },
   });

   const aiFormFields: TControllerInputFormField[] = [
     {
-      key: "GPT_ENGINE",
+      key: "LLM_MODEL",
       type: "text",
-      label: "GPT_ENGINE",
+      label: "LLM Model",
       description: (
         <>
           Choose an OpenAI engine.{" "}
@@ -49,12 +49,12 @@ export const InstanceAIForm: FC<IInstanceAIForm> = (props) => {
         </a>
       </>
     ),
-    placeholder: "gpt-3.5-turbo",
-    error: Boolean(errors.GPT_ENGINE),
+    placeholder: "gpt-4o-mini",
+    error: Boolean(errors.LLM_MODEL),
     required: false,
   },
   {
-    key: "OPENAI_API_KEY",
+    key: "LLM_API_KEY",
     type: "password",
     label: "API key",
     description: (
@@ -71,7 +71,7 @@ export const InstanceAIForm: FC<IInstanceAIForm> = (props) => {
       </>
     ),
     placeholder: "sk-asddassdfasdefqsdfasd23das3dasdcasd",
-    error: Boolean(errors.OPENAI_API_KEY),
+    error: Boolean(errors.LLM_API_KEY),
     required: false,
   },
 ];
@@ -98,11 +98,7 @@ export const InstanceGithubConfigForm: FC<Props> = (props) => {
       key: "GITHUB_ORGANIZATION_ID",
       type: "text",
       label: "Organization ID",
-      description: (
-        <>
-          The organization github ID.
-        </>
-      ),
+      description: <>The organization github ID.</>,
       placeholder: "123456789",
       error: Boolean(errors.GITHUB_ORGANIZATION_ID),
       required: false,
@@ -10,11 +10,13 @@ type Props = {
   handleClose: () => void;
 };

-enum ESendEmailSteps {
-  SEND_EMAIL = "SEND_EMAIL",
-  SUCCESS = "SUCCESS",
-  FAILED = "FAILED",
-}
+const ESendEmailSteps = {
+  SEND_EMAIL: "SEND_EMAIL",
+  SUCCESS: "SUCCESS",
+  FAILED: "FAILED",
+} as const;
+
+type ESendEmailSteps = typeof ESendEmailSteps[keyof typeof ESendEmailSteps];

 const instanceService = new InstanceService();
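This enum-to-object rewrite is the pattern the compared branch applies across the admin app. A minimal sketch of why call sites keep compiling unchanged (names reused from the hunk above; the motivation stated here is an inference, e.g. compatibility with enum-free, erasable-syntax TypeScript):

```ts
const ESendEmailSteps = {
  SEND_EMAIL: "SEND_EMAIL",
  SUCCESS: "SUCCESS",
  FAILED: "FAILED",
} as const;

// The derived type is the union "SEND_EMAIL" | "SUCCESS" | "FAILED", so
// annotations that previously referenced the enum still type-check, and
// member access syntax is identical at the value level.
type ESendEmailSteps = (typeof ESendEmailSteps)[keyof typeof ESendEmailSteps];

// Usage reads exactly like the enum version:
function label(step: ESendEmailSteps): string {
  return step === ESendEmailSteps.SUCCESS ? "Email sent" : "Not sent yet";
}
```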
@@ -3,18 +3,16 @@
 import { ReactNode } from "react";
 import { ThemeProvider, useTheme } from "next-themes";
 import { SWRConfig } from "swr";
-// ui
+// plane imports
 import { ADMIN_BASE_PATH, DEFAULT_SWR_CONFIG } from "@plane/constants";
 import { Toast } from "@plane/ui";
 import { resolveGeneralTheme } from "@plane/utils";
-// constants
-// helpers
 // lib
 import { InstanceProvider } from "@/lib/instance-provider";
 import { StoreProvider } from "@/lib/store-provider";
 import { UserProvider } from "@/lib/user-provider";
 // styles
-import "./globals.css";
+import "@/styles/globals.css";

 const ToastWithTheme = () => {
   const { resolvedTheme } = useTheme();
@@ -7,7 +7,7 @@ import { LogOut, UserCog2, Palette } from "lucide-react";
 import { Menu, Transition } from "@headlessui/react";
 // plane internal packages
 import { API_BASE_URL } from "@plane/constants";
-import {AuthService } from "@plane/services";
+import { AuthService } from "@plane/services";
 import { Avatar } from "@plane/ui";
 import { getFileURL, cn } from "@plane/utils";
 // hooks
@@ -16,14 +16,16 @@ import { Banner, PasswordStrengthMeter } from "@/components/common";
 const authService = new AuthService();

 // error codes
-enum EErrorCodes {
-  INSTANCE_NOT_CONFIGURED = "INSTANCE_NOT_CONFIGURED",
-  ADMIN_ALREADY_EXIST = "ADMIN_ALREADY_EXIST",
-  REQUIRED_EMAIL_PASSWORD_FIRST_NAME = "REQUIRED_EMAIL_PASSWORD_FIRST_NAME",
-  INVALID_EMAIL = "INVALID_EMAIL",
-  INVALID_PASSWORD = "INVALID_PASSWORD",
-  USER_ALREADY_EXISTS = "USER_ALREADY_EXISTS",
-}
+const EErrorCodes = {
+  INSTANCE_NOT_CONFIGURED: "INSTANCE_NOT_CONFIGURED",
+  ADMIN_ALREADY_EXIST: "ADMIN_ALREADY_EXIST",
+  REQUIRED_EMAIL_PASSWORD_FIRST_NAME: "REQUIRED_EMAIL_PASSWORD_FIRST_NAME",
+  INVALID_EMAIL: "INVALID_EMAIL",
+  INVALID_PASSWORD: "INVALID_PASSWORD",
+  USER_ALREADY_EXISTS: "USER_ALREADY_EXISTS",
+} as const;
+
+type EErrorCodes = typeof EErrorCodes[keyof typeof EErrorCodes];

 type TError = {
   type: EErrorCodes | undefined;
@@ -144,7 +146,7 @@ export const InstanceSetupForm: FC = (props) => {

       {errorData.type &&
         errorData?.message &&
-        ![EErrorCodes.INVALID_EMAIL, EErrorCodes.INVALID_PASSWORD].includes(errorData.type) && (
+        !([EErrorCodes.INVALID_EMAIL, EErrorCodes.INVALID_PASSWORD] as EErrorCodes[]).includes(errorData.type) && (
          <Banner type="error" message={errorData?.message} />
        )}
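The added cast in the second hunk follows from the refactor: with the const-object pattern, the array literal infers a narrower union than the old enum type did, so `Array.prototype.includes` rejects the wider argument. A small, self-contained sketch of the compiler behavior (names reused from the hunk):

```ts
const EErrorCodes = {
  INVALID_EMAIL: "INVALID_EMAIL",
  INVALID_PASSWORD: "INVALID_PASSWORD",
  USER_ALREADY_EXISTS: "USER_ALREADY_EXISTS",
} as const;
type EErrorCodes = (typeof EErrorCodes)[keyof typeof EErrorCodes];

declare const errorType: EErrorCodes;

// The array infers as ("INVALID_EMAIL" | "INVALID_PASSWORD")[], so .includes()
// rejects the wider EErrorCodes argument:
// [EErrorCodes.INVALID_EMAIL, EErrorCodes.INVALID_PASSWORD].includes(errorType); // type error

// Widening the array keeps the identical runtime check and satisfies the compiler:
([EErrorCodes.INVALID_EMAIL, EErrorCodes.INVALID_PASSWORD] as EErrorCodes[]).includes(errorType);
```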
@@ -18,13 +18,15 @@ import { AuthBanner } from "../authentication";
 const authService = new AuthService();

 // error codes
-enum EErrorCodes {
-  INSTANCE_NOT_CONFIGURED = "INSTANCE_NOT_CONFIGURED",
-  REQUIRED_EMAIL_PASSWORD = "REQUIRED_EMAIL_PASSWORD",
-  INVALID_EMAIL = "INVALID_EMAIL",
-  USER_DOES_NOT_EXIST = "USER_DOES_NOT_EXIST",
-  AUTHENTICATION_FAILED = "AUTHENTICATION_FAILED",
-}
+const EErrorCodes = {
+  INSTANCE_NOT_CONFIGURED: "INSTANCE_NOT_CONFIGURED",
+  REQUIRED_EMAIL_PASSWORD: "REQUIRED_EMAIL_PASSWORD",
+  INVALID_EMAIL: "INVALID_EMAIL",
+  USER_DOES_NOT_EXIST: "USER_DOES_NOT_EXIST",
+  AUTHENTICATION_FAILED: "AUTHENTICATION_FAILED",
+} as const;
+
+type EErrorCodes = typeof EErrorCodes[keyof typeof EErrorCodes];

 type TError = {
   type: EErrorCodes | undefined;
@@ -20,13 +20,15 @@ import githubDarkModeImage from "@/public/logos/github-white.png";
 import GitlabLogo from "@/public/logos/gitlab-logo.svg";
 import GoogleLogo from "@/public/logos/google-logo.svg";

-export enum EErrorAlertType {
-  BANNER_ALERT = "BANNER_ALERT",
-  INLINE_FIRST_NAME = "INLINE_FIRST_NAME",
-  INLINE_EMAIL = "INLINE_EMAIL",
-  INLINE_PASSWORD = "INLINE_PASSWORD",
-  INLINE_EMAIL_CODE = "INLINE_EMAIL_CODE",
-}
+export const EErrorAlertType = {
+  BANNER_ALERT: "BANNER_ALERT",
+  INLINE_FIRST_NAME: "INLINE_FIRST_NAME",
+  INLINE_EMAIL: "INLINE_EMAIL",
+  INLINE_PASSWORD: "INLINE_PASSWORD",
+  INLINE_EMAIL_CODE: "INLINE_EMAIL_CODE",
+} as const;
+
+export type EErrorAlertType = typeof EErrorAlertType[keyof typeof EErrorAlertType];

 const errorCodeMessages: {
   [key in EAdminAuthErrorCodes]: { title: string; message: (email?: string | undefined) => ReactNode };
@@ -2,7 +2,7 @@ import set from "lodash/set";
 import { observable, action, computed, makeObservable, runInAction } from "mobx";
 // plane internal packages
 import { EInstanceStatus, TInstanceStatus } from "@plane/constants";
-import {InstanceService} from "@plane/services";
+import { InstanceService } from "@plane/services";
 import {
   IInstance,
   IInstanceAdmin,
@@ -1 +1 @@
-export * from "ce/components/authentication/authentication-modes";
+export * from "ce/components/authentication/authentication-modes";
@@ -1,7 +1,7 @@
 {
   "name": "admin",
   "description": "Admin UI for Plane",
-  "version": "0.26.0",
+  "version": "0.26.1",
   "license": "AGPL-3.0",
   "private": true,
   "scripts": {
@@ -10,6 +10,7 @@
     "build": "next build",
     "preview": "next build && next start",
     "start": "next start",
+    "format": "prettier --write .",
     "lint": "eslint . --ext .ts,.tsx",
     "lint:errors": "eslint . --ext .ts,.tsx --quiet"
   },
@@ -17,6 +18,7 @@
     "@headlessui/react": "^1.7.19",
     "@plane/constants": "*",
     "@plane/hooks": "*",
+    "@plane/propel": "*",
     "@plane/services": "*",
     "@plane/types": "*",
     "@plane/ui": "*",
@@ -29,7 +31,7 @@
     "lucide-react": "^0.469.0",
     "mobx": "^6.12.0",
     "mobx-react": "^9.1.1",
-    "next": "^14.2.28",
+    "next": "^14.2.29",
     "next-themes": "^0.2.1",
     "postcss": "^8.4.38",
     "react": "^18.3.1",
@@ -48,6 +50,6 @@
     "@types/react-dom": "^18.2.18",
     "@types/uuid": "^9.0.8",
     "@types/zxcvbn": "^4.4.4",
-    "typescript": "5.3.3"
+    "typescript": "5.8.3"
   }
 }
@@ -1,8 +1,2 @@
-module.exports = {
-  plugins: {
-    "postcss-import": {},
-    "tailwindcss/nesting": {},
-    tailwindcss: {},
-    autoprefixer: {},
-  },
-};
+// eslint-disable-next-line @typescript-eslint/no-require-imports
+module.exports = require("@plane/tailwind-config/postcss.config.js");
@@ -1,5 +1,4 @@
-@import url("https://fonts.googleapis.com/css2?family=Inter:wght@200;300;400;500;600;700;800&display=swap");
-@import url("https://fonts.googleapis.com/css2?family=Material+Symbols+Rounded:opsz,wght,FILL,GRAD@48,400,0,0&display=swap");
+@import "@plane/propel/styles/fonts";

 @tailwind base;
 @tailwind components;
@@ -60,23 +59,31 @@
   --color-border-300: 212, 212, 212; /* strong border- 1 */
   --color-border-400: 185, 185, 185; /* strong border- 2 */

-  --color-shadow-2xs: 0px 0px 1px 0px rgba(23, 23, 23, 0.06), 0px 1px 2px 0px rgba(23, 23, 23, 0.06),
+  --color-shadow-2xs:
+    0px 0px 1px 0px rgba(23, 23, 23, 0.06), 0px 1px 2px 0px rgba(23, 23, 23, 0.06),
     0px 1px 2px 0px rgba(23, 23, 23, 0.14);
-  --color-shadow-xs: 0px 1px 2px 0px rgba(0, 0, 0, 0.16), 0px 2px 4px 0px rgba(16, 24, 40, 0.12),
+  --color-shadow-xs:
+    0px 1px 2px 0px rgba(0, 0, 0, 0.16), 0px 2px 4px 0px rgba(16, 24, 40, 0.12),
     0px 1px 8px -1px rgba(16, 24, 40, 0.1);
-  --color-shadow-sm: 0px 1px 4px 0px rgba(0, 0, 0, 0.01), 0px 4px 8px 0px rgba(0, 0, 0, 0.02),
-    0px 1px 12px 0px rgba(0, 0, 0, 0.12);
-  --color-shadow-rg: 0px 3px 6px 0px rgba(0, 0, 0, 0.1), 0px 4px 4px 0px rgba(16, 24, 40, 0.08),
+  --color-shadow-sm:
+    0px 1px 4px 0px rgba(0, 0, 0, 0.01), 0px 4px 8px 0px rgba(0, 0, 0, 0.02), 0px 1px 12px 0px rgba(0, 0, 0, 0.12);
+  --color-shadow-rg:
+    0px 3px 6px 0px rgba(0, 0, 0, 0.1), 0px 4px 4px 0px rgba(16, 24, 40, 0.08),
     0px 1px 12px 0px rgba(16, 24, 40, 0.04);
-  --color-shadow-md: 0px 4px 8px 0px rgba(0, 0, 0, 0.12), 0px 6px 12px 0px rgba(16, 24, 40, 0.12),
+  --color-shadow-md:
+    0px 4px 8px 0px rgba(0, 0, 0, 0.12), 0px 6px 12px 0px rgba(16, 24, 40, 0.12),
     0px 1px 16px 0px rgba(16, 24, 40, 0.12);
-  --color-shadow-lg: 0px 6px 12px 0px rgba(0, 0, 0, 0.12), 0px 8px 16px 0px rgba(0, 0, 0, 0.12),
+  --color-shadow-lg:
+    0px 6px 12px 0px rgba(0, 0, 0, 0.12), 0px 8px 16px 0px rgba(0, 0, 0, 0.12),
     0px 1px 24px 0px rgba(16, 24, 40, 0.12);
-  --color-shadow-xl: 0px 0px 18px 0px rgba(0, 0, 0, 0.16), 0px 0px 24px 0px rgba(16, 24, 40, 0.16),
+  --color-shadow-xl:
+    0px 0px 18px 0px rgba(0, 0, 0, 0.16), 0px 0px 24px 0px rgba(16, 24, 40, 0.16),
     0px 0px 52px 0px rgba(16, 24, 40, 0.16);
-  --color-shadow-2xl: 0px 8px 16px 0px rgba(0, 0, 0, 0.12), 0px 12px 24px 0px rgba(16, 24, 40, 0.12),
+  --color-shadow-2xl:
+    0px 8px 16px 0px rgba(0, 0, 0, 0.12), 0px 12px 24px 0px rgba(16, 24, 40, 0.12),
     0px 1px 32px 0px rgba(16, 24, 40, 0.12);
-  --color-shadow-3xl: 0px 12px 24px 0px rgba(0, 0, 0, 0.12), 0px 16px 32px 0px rgba(0, 0, 0, 0.12),
+  --color-shadow-3xl:
+    0px 12px 24px 0px rgba(0, 0, 0, 0.12), 0px 16px 32px 0px rgba(0, 0, 0, 0.12),
     0px 1px 48px 0px rgba(16, 24, 40, 0.12);
   --color-shadow-4xl: 0px 8px 40px 0px rgba(0, 0, 61, 0.05), 0px 12px 32px -16px rgba(0, 0, 0, 0.05);
@@ -1,13 +1,19 @@
 {
   "extends": "@plane/typescript-config/nextjs.json",
   "compilerOptions": {
-    "plugins": [{ "name": "next" }],
+    "plugins": [
+      {
+        "name": "next"
+      }
+    ],
     "baseUrl": ".",
     "paths": {
       "@/*": ["core/*"],
       "@/public/*": ["public/*"],
-      "@/plane-admin/*": ["ce/*"]
-    }
+      "@/plane-admin/*": ["ce/*"],
+      "@/styles/*": ["styles/*"]
+    },
+    "strictNullChecks": true
   },
   "include": ["next-env.d.ts", "next.config.js", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
   "exclude": ["node_modules"]
apiserver/.coveragerc | 25 (new file)
@@ -0,0 +1,25 @@
+[run]
+source = plane
+omit =
+    */tests/*
+    */migrations/*
+    */settings/*
+    */wsgi.py
+    */asgi.py
+    */urls.py
+    manage.py
+    */admin.py
+    */apps.py
+
+[report]
+exclude_lines =
+    pragma: no cover
+    def __repr__
+    if self.debug:
+    raise NotImplementedError
+    if __name__ == .__main__.
+    pass
+    raise ImportError
+
+[html]
+directory = htmlcov
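With this file at the apiserver root, a typical local run with the standard coverage.py CLI (the test command itself is an assumption based on the Django layout) would look like:

```sh
cd apiserver
coverage run manage.py test   # picks up .coveragerc automatically
coverage report               # honors the [report] exclude_lines
coverage html                 # writes htmlcov/, newly ignored in .gitignore above
```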
@@ -1,6 +1,6 @@
 {
   "name": "plane-api",
-  "version": "0.26.0",
+  "version": "0.26.1",
   "license": "AGPL-3.0",
   "private": true,
   "description": "API server powering Plane's backend"
@@ -15,4 +15,4 @@ from .state import StateLiteSerializer, StateSerializer
 from .cycle import CycleSerializer, CycleIssueSerializer, CycleLiteSerializer
 from .module import ModuleSerializer, ModuleIssueSerializer, ModuleLiteSerializer
 from .intake import IntakeIssueSerializer
-from .estimate import EstimatePointSerializer
+from .estimate import EstimatePointSerializer
@@ -160,12 +160,15 @@ class IssueSerializer(BaseSerializer):
         else:
             try:
                 # Then assign it to default assignee, if it is a valid assignee
-                if default_assignee_id is not None and ProjectMember.objects.filter(
-                    member_id=default_assignee_id,
-                    project_id=project_id,
-                    role__gte=15,
-                    is_active=True
-                ).exists():
+                if (
+                    default_assignee_id is not None
+                    and ProjectMember.objects.filter(
+                        member_id=default_assignee_id,
+                        project_id=project_id,
+                        role__gte=15,
+                        is_active=True,
+                    ).exists()
+                ):
                     IssueAssignee.objects.create(
                         assignee_id=default_assignee_id,
                         issue=issue,
@@ -58,7 +58,7 @@ from plane.bgtasks.storage_metadata_task import get_asset_object_metadata
 from .base import BaseAPIView
 from plane.utils.host import base_host
 from plane.bgtasks.webhook_task import model_activity
-
+from plane.bgtasks.work_item_link_task import crawl_work_item_link_title

 class WorkspaceIssueAPIEndpoint(BaseAPIView):
     """
@@ -692,6 +692,9 @@ class IssueLinkAPIEndpoint(BaseAPIView):
         serializer = IssueLinkSerializer(data=request.data)
         if serializer.is_valid():
             serializer.save(project_id=project_id, issue_id=issue_id)
+            crawl_work_item_link_title.delay(
+                serializer.data.get("id"), serializer.data.get("url")
+            )

             link = IssueLink.objects.get(pk=serializer.data["id"])
             link.created_by_id = request.data.get("created_by", request.user.id)
@@ -719,6 +722,9 @@ class IssueLinkAPIEndpoint(BaseAPIView):
         serializer = IssueLinkSerializer(issue_link, data=request.data, partial=True)
         if serializer.is_valid():
             serializer.save()
+            crawl_work_item_link_title.delay(
+                serializer.data.get("id"), serializer.data.get("url")
+            )
             issue_activity.delay(
                 type="link.activity.updated",
                 requested_data=requested_data,
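`crawl_work_item_link_title.delay(...)` is Celery's fire-and-forget dispatch, so the request never blocks on the crawl. A sketch of a task with the same call shape (the body here is a placeholder; the real task lives in plane.bgtasks.work_item_link_task):

```python
from celery import shared_task


@shared_task
def crawl_work_item_link_title(link_id: str, url: str) -> None:
    # Placeholder body: the real task resolves the page title for `url`
    # and stores it on the IssueLink identified by `link_id`.
    print(f"crawling {url} for link {link_id}")
```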
@@ -53,6 +53,7 @@ def get_entity_model_and_serializer(entity_type):
     }
     return entity_map.get(entity_type, (None, None))

+
 class UserFavoriteSerializer(serializers.ModelSerializer):
     entity_data = serializers.SerializerMethodField()
@@ -148,10 +148,13 @@ class ProjectMemberAdminSerializer(BaseSerializer):
         fields = "__all__"


 class ProjectMemberRoleSerializer(DynamicBaseSerializer):
+    original_role = serializers.IntegerField(source='role', read_only=True)
+
     class Meta:
         model = ProjectMember
-        fields = ("id", "role", "member", "project")
+        fields = ("id", "role", "member", "project", "original_role", "created_at")
+        read_only_fields = ["original_role", "created_at"]


 class ProjectMemberInviteSerializer(BaseSerializer):
@@ -3,11 +3,22 @@ from rest_framework import serializers

 # Module import
 from plane.db.models import Account, Profile, User, Workspace, WorkspaceMemberInvite
+from plane.utils.url import contains_url

 from .base import BaseSerializer


 class UserSerializer(BaseSerializer):
+    def validate_first_name(self, value):
+        if contains_url(value):
+            raise serializers.ValidationError("First name cannot contain a URL.")
+        return value
+
+    def validate_last_name(self, value):
+        if contains_url(value):
+            raise serializers.ValidationError("Last name cannot contain a URL.")
+        return value
+
     class Meta:
         model = User
         # Exclude password field from the serializer
@@ -99,11 +110,16 @@ class UserMeSettingsSerializer(BaseSerializer):
                 workspace_member__member=obj.id,
                 workspace_member__is_active=True,
             ).first()
+            logo_asset_url = workspace.logo_asset.asset_url if workspace.logo_asset is not None else ""
             return {
                 "last_workspace_id": profile.last_workspace_id,
                 "last_workspace_slug": (
                     workspace.slug if workspace is not None else ""
                 ),
+                "last_workspace_name": (
+                    workspace.name if workspace is not None else ""
+                ),
+                "last_workspace_logo": (logo_asset_url),
                 "fallback_workspace_id": profile.last_workspace_id,
                 "fallback_workspace_slug": (
                     workspace.slug if workspace is not None else ""
@@ -25,10 +25,12 @@ from plane.db.models import (
     WorkspaceUserPreference,
 )
 from plane.utils.constants import RESTRICTED_WORKSPACE_SLUGS
+from plane.utils.url import contains_url

 # Django imports
 from django.core.validators import URLValidator
 from django.core.exceptions import ValidationError
+import re


 class WorkSpaceSerializer(DynamicBaseSerializer):
@@ -36,10 +38,21 @@ class WorkSpaceSerializer(DynamicBaseSerializer):
     logo_url = serializers.CharField(read_only=True)
     role = serializers.IntegerField(read_only=True)

+    def validate_name(self, value):
+        # Check if the name contains a URL
+        if contains_url(value):
+            raise serializers.ValidationError("Name must not contain URLs")
+        return value
+
     def validate_slug(self, value):
         # Check if the slug is restricted
         if value in RESTRICTED_WORKSPACE_SLUGS:
             raise serializers.ValidationError("Slug is not valid")
+        # Slug should only contain alphanumeric characters, hyphens, and underscores
+        if not re.match(r"^[a-zA-Z0-9_-]+$", value):
+            raise serializers.ValidationError(
+                "Slug can only contain letters, numbers, hyphens (-), and underscores (_)"
+            )
         return value

     class Meta:
@@ -148,7 +161,6 @@ class WorkspaceUserLinkSerializer(BaseSerializer):

         return value

-
     def create(self, validated_data):
         # Filtering the WorkspaceUserLink with the given url to check if the link already exists.
@@ -157,7 +169,7 @@ class WorkspaceUserLinkSerializer(BaseSerializer):
         workspace_user_link = WorkspaceUserLink.objects.filter(
             url=url,
             workspace_id=validated_data.get("workspace_id"),
-            owner_id=validated_data.get("owner_id")
+            owner_id=validated_data.get("owner_id"),
         )

         if workspace_user_link.exists():
@@ -173,10 +185,8 @@ class WorkspaceUserLinkSerializer(BaseSerializer):
         url = validated_data.get("url")

         workspace_user_link = WorkspaceUserLink.objects.filter(
-            url=url,
-            workspace_id=instance.workspace_id,
-            owner=instance.owner
+            url=url, workspace_id=instance.workspace_id, owner=instance.owner
         )

         if workspace_user_link.exclude(pk=instance.id).exists():
             raise serializers.ValidationError(
@@ -185,6 +195,7 @@ class WorkspaceUserLinkSerializer(BaseSerializer):

         return super().update(instance, validated_data)

+
 class IssueRecentVisitSerializer(serializers.ModelSerializer):
     project_identifier = serializers.SerializerMethodField()
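The slug rule above is easy to sanity-check in isolation (candidate values are hypothetical):

```python
import re

SLUG_PATTERN = r"^[a-zA-Z0-9_-]+$"

for candidate in ("acme-team", "acme_team_1", "acme team", "acmé"):
    ok = bool(re.match(SLUG_PATTERN, candidate))
    print(f"{candidate!r}: {'valid' if ok else 'rejected'}")
# 'acme-team': valid, 'acme_team_1': valid, 'acme team': rejected, 'acmé': rejected
```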
@@ -6,8 +6,14 @@ from plane.app.views import (
     AnalyticViewViewset,
     SavedAnalyticEndpoint,
     ExportAnalyticsEndpoint,
+    AdvanceAnalyticsEndpoint,
+    AdvanceAnalyticsStatsEndpoint,
+    AdvanceAnalyticsChartEndpoint,
     DefaultAnalyticsEndpoint,
     ProjectStatsEndpoint,
+    ProjectAdvanceAnalyticsEndpoint,
+    ProjectAdvanceAnalyticsStatsEndpoint,
+    ProjectAdvanceAnalyticsChartEndpoint,
 )

@@ -49,4 +55,34 @@ urlpatterns = [
         ProjectStatsEndpoint.as_view(),
         name="project-analytics",
     ),
+    path(
+        "workspaces/<str:slug>/advance-analytics/",
+        AdvanceAnalyticsEndpoint.as_view(),
+        name="advance-analytics",
+    ),
+    path(
+        "workspaces/<str:slug>/advance-analytics-stats/",
+        AdvanceAnalyticsStatsEndpoint.as_view(),
+        name="advance-analytics-stats",
+    ),
+    path(
+        "workspaces/<str:slug>/advance-analytics-charts/",
+        AdvanceAnalyticsChartEndpoint.as_view(),
+        name="advance-analytics-chart",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/advance-analytics/",
+        ProjectAdvanceAnalyticsEndpoint.as_view(),
+        name="project-advance-analytics",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/advance-analytics-stats/",
+        ProjectAdvanceAnalyticsStatsEndpoint.as_view(),
+        name="project-advance-analytics-stats",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/advance-analytics-charts/",
+        ProjectAdvanceAnalyticsChartEndpoint.as_view(),
+        name="project-advance-analytics-chart",
+    ),
 ]
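For orientation, a hypothetical client call against the new workspace-level route (the host, API prefix, auth header, and parameter values are assumptions; the path and `tab` values come from the urlpatterns and views in this changeset):

```python
import requests

resp = requests.get(
    "https://plane.example.com/api/workspaces/my-workspace/advance-analytics/",
    params={"tab": "work-items", "project_ids": "<uuid-1>,<uuid-2>"},
    headers={"Authorization": "Bearer <api-token>"},  # auth scheme is an assumption
)
# Expected shape, per get_work_items_stats() in advance.py below:
# {"total_work_items": {"count": ...}, "started_work_items": {"count": ...}, ...}
print(resp.json())
```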
@@ -12,6 +12,7 @@ from plane.app.views import (
     AssetRestoreEndpoint,
     ProjectAssetEndpoint,
     ProjectBulkAssetEndpoint,
+    AssetCheckEndpoint,
 )

@@ -81,5 +82,11 @@ urlpatterns = [
     path(
         "assets/v2/workspaces/<str:slug>/projects/<uuid:project_id>/<uuid:entity_id>/bulk/",
         ProjectBulkAssetEndpoint.as_view(),
         name="bulk-asset-update",
     ),
+    path(
+        "assets/v2/workspaces/<str:slug>/check/<uuid:asset_id>/",
+        AssetCheckEndpoint.as_view(),
+        name="asset-check",
+    ),
 ]
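Similarly, a hypothetical probe of the new asset-check route (host, API prefix, and auth are assumptions; the path shape comes from the urlpattern above, and the response shape from AssetCheckEndpoint later in this changeset):

```python
import requests

resp = requests.get(
    "https://plane.example.com/api/assets/v2/workspaces/my-workspace/check/"
    "123e4567-e89b-12d3-a456-426614174000/",
    headers={"Authorization": "Bearer <api-token>"},  # auth scheme is an assumption
)
print(resp.json())  # {"exists": true} or {"exists": false}
```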
@@ -106,6 +106,7 @@ from .asset.v2 import (
     AssetRestoreEndpoint,
     ProjectAssetEndpoint,
     ProjectBulkAssetEndpoint,
+    AssetCheckEndpoint,
 )
 from .issue.base import (
     IssueListEndpoint,
@@ -199,6 +200,18 @@ from .analytic.base import (
     ProjectStatsEndpoint,
 )

+from .analytic.advance import (
+    AdvanceAnalyticsEndpoint,
+    AdvanceAnalyticsStatsEndpoint,
+    AdvanceAnalyticsChartEndpoint,
+)
+
+from .analytic.project_analytics import (
+    ProjectAdvanceAnalyticsEndpoint,
+    ProjectAdvanceAnalyticsStatsEndpoint,
+    ProjectAdvanceAnalyticsChartEndpoint,
+)
+
 from .notification.base import (
     NotificationViewSet,
     UnreadNotificationEndpoint,
apiserver/plane/app/views/analytic/advance.py | 366 (new file)
@@ -0,0 +1,366 @@
from rest_framework.response import Response
from rest_framework import status
from typing import Dict, List, Any
from django.db.models import QuerySet, Q, Count
from django.http import HttpRequest
from django.db.models.functions import TruncMonth
from django.utils import timezone
from plane.app.views.base import BaseAPIView
from plane.app.permissions import ROLE, allow_permission
from plane.db.models import (
    WorkspaceMember,
    Project,
    Issue,
    Cycle,
    Module,
    IssueView,
    ProjectPage,
    Workspace,
    CycleIssue,
    ModuleIssue,
    ProjectMember,
)
from plane.utils.build_chart import build_analytics_chart
from plane.utils.date_utils import (
    get_analytics_filters,
)


class AdvanceAnalyticsBaseView(BaseAPIView):
    def initialize_workspace(self, slug: str, type: str) -> None:
        self._workspace_slug = slug
        self.filters = get_analytics_filters(
            slug=slug,
            type=type,
            user=self.request.user,
            date_filter=self.request.GET.get("date_filter", None),
            project_ids=self.request.GET.get("project_ids", None),
        )


class AdvanceAnalyticsEndpoint(AdvanceAnalyticsBaseView):
    def get_filtered_counts(self, queryset: QuerySet) -> Dict[str, int]:
        def get_filtered_count() -> int:
            if self.filters["analytics_date_range"]:
                return queryset.filter(
                    created_at__gte=self.filters["analytics_date_range"]["current"]["gte"],
                    created_at__lte=self.filters["analytics_date_range"]["current"]["lte"],
                ).count()
            return queryset.count()

        def get_previous_count() -> int:
            if self.filters["analytics_date_range"] and self.filters[
                "analytics_date_range"
            ].get("previous"):
                return queryset.filter(
                    created_at__gte=self.filters["analytics_date_range"]["previous"]["gte"],
                    created_at__lte=self.filters["analytics_date_range"]["previous"]["lte"],
                ).count()
            return 0

        return {
            "count": get_filtered_count(),
            # "filter_count": get_previous_count(),
        }

    def get_overview_data(self) -> Dict[str, Dict[str, int]]:
        members_query = WorkspaceMember.objects.filter(
            workspace__slug=self._workspace_slug, is_active=True
        )

        if self.request.GET.get("project_ids", None):
            project_ids = self.request.GET.get("project_ids", None)
            project_ids = [str(project_id) for project_id in project_ids.split(",")]
            members_query = ProjectMember.objects.filter(
                project_id__in=project_ids, is_active=True
            )

        return {
            "total_users": self.get_filtered_counts(members_query),
            "total_admins": self.get_filtered_counts(
                members_query.filter(role=ROLE.ADMIN.value)
            ),
            "total_members": self.get_filtered_counts(
                members_query.filter(role=ROLE.MEMBER.value)
            ),
            "total_guests": self.get_filtered_counts(
                members_query.filter(role=ROLE.GUEST.value)
            ),
            "total_projects": self.get_filtered_counts(
                Project.objects.filter(**self.filters["project_filters"])
            ),
            "total_work_items": self.get_filtered_counts(
                Issue.issue_objects.filter(**self.filters["base_filters"])
            ),
            "total_cycles": self.get_filtered_counts(
                Cycle.objects.filter(**self.filters["base_filters"])
            ),
            "total_intake": self.get_filtered_counts(
                Issue.objects.filter(**self.filters["base_filters"]).filter(
                    issue_intake__status__in=["-2", "0"]
                )
            ),
        }

    def get_work_items_stats(self) -> Dict[str, Dict[str, int]]:
        base_queryset = Issue.issue_objects.filter(**self.filters["base_filters"])

        return {
            "total_work_items": self.get_filtered_counts(base_queryset),
            "started_work_items": self.get_filtered_counts(
                base_queryset.filter(state__group="started")
            ),
            "backlog_work_items": self.get_filtered_counts(
                base_queryset.filter(state__group="backlog")
            ),
            "un_started_work_items": self.get_filtered_counts(
                base_queryset.filter(state__group="unstarted")
            ),
            "completed_work_items": self.get_filtered_counts(
                base_queryset.filter(state__group="completed")
            ),
        }

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE")
    def get(self, request: HttpRequest, slug: str) -> Response:
        self.initialize_workspace(slug, type="analytics")
        tab = request.GET.get("tab", "overview")

        if tab == "overview":
            return Response(
                self.get_overview_data(),
                status=status.HTTP_200_OK,
            )
        elif tab == "work-items":
            return Response(
                self.get_work_items_stats(),
                status=status.HTTP_200_OK,
            )
        return Response({"message": "Invalid tab"}, status=status.HTTP_400_BAD_REQUEST)


class AdvanceAnalyticsStatsEndpoint(AdvanceAnalyticsBaseView):
    def get_project_issues_stats(self) -> QuerySet:
        # Get the base queryset with workspace and project filters
        base_queryset = Issue.issue_objects.filter(**self.filters["base_filters"])

        # Apply date range filter if available
        if self.filters["chart_period_range"]:
            start_date, end_date = self.filters["chart_period_range"]
            base_queryset = base_queryset.filter(
                created_at__date__gte=start_date, created_at__date__lte=end_date
            )

        return (
            base_queryset.values("project_id", "project__name")
            .annotate(
                cancelled_work_items=Count("id", filter=Q(state__group="cancelled")),
                completed_work_items=Count("id", filter=Q(state__group="completed")),
                backlog_work_items=Count("id", filter=Q(state__group="backlog")),
                un_started_work_items=Count("id", filter=Q(state__group="unstarted")),
                started_work_items=Count("id", filter=Q(state__group="started")),
            )
            .order_by("project_id")
        )

    def get_work_items_stats(self) -> Dict[str, Dict[str, int]]:
        base_queryset = Issue.issue_objects.filter(**self.filters["base_filters"])
        return (
            base_queryset.values("project_id", "project__name")
            .annotate(
                cancelled_work_items=Count("id", filter=Q(state__group="cancelled")),
                completed_work_items=Count("id", filter=Q(state__group="completed")),
                backlog_work_items=Count("id", filter=Q(state__group="backlog")),
                un_started_work_items=Count("id", filter=Q(state__group="unstarted")),
                started_work_items=Count("id", filter=Q(state__group="started")),
            )
            .order_by("project_id")
        )

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE")
    def get(self, request: HttpRequest, slug: str) -> Response:
        self.initialize_workspace(slug, type="chart")
        type = request.GET.get("type", "work-items")

        if type == "work-items":
            return Response(
                self.get_work_items_stats(),
                status=status.HTTP_200_OK,
            )

        return Response({"message": "Invalid type"}, status=status.HTTP_400_BAD_REQUEST)


class AdvanceAnalyticsChartEndpoint(AdvanceAnalyticsBaseView):
    def project_chart(self) -> List[Dict[str, Any]]:
        # Get the base queryset with workspace and project filters
        base_queryset = Issue.issue_objects.filter(**self.filters["base_filters"])
        date_filter = {}

        # Apply date range filter if available
        if self.filters["chart_period_range"]:
            start_date, end_date = self.filters["chart_period_range"]
            date_filter = {
                "created_at__date__gte": start_date,
                "created_at__date__lte": end_date,
            }

        total_work_items = base_queryset.filter(**date_filter).count()
        total_cycles = Cycle.objects.filter(
            **self.filters["base_filters"], **date_filter
        ).count()
        total_modules = Module.objects.filter(
            **self.filters["base_filters"], **date_filter
        ).count()
        total_intake = Issue.objects.filter(
            issue_intake__isnull=False, **self.filters["base_filters"], **date_filter
        ).count()
        total_members = WorkspaceMember.objects.filter(
            workspace__slug=self._workspace_slug, is_active=True, **date_filter
        ).count()
        total_pages = ProjectPage.objects.filter(
            **self.filters["base_filters"], **date_filter
        ).count()
        total_views = IssueView.objects.filter(
            **self.filters["base_filters"], **date_filter
        ).count()

        data = {
            "work_items": total_work_items,
            "cycles": total_cycles,
            "modules": total_modules,
            "intake": total_intake,
            "members": total_members,
            "pages": total_pages,
            "views": total_views,
        }

        return [
            {
                "key": key,
                "name": key.replace("_", " ").title(),
                "count": value or 0,
            }
            for key, value in data.items()
        ]

    def work_item_completion_chart(self) -> Dict[str, Any]:
        # Get the base queryset
        queryset = (
            Issue.issue_objects.filter(**self.filters["base_filters"])
            .select_related("workspace", "state", "parent")
            .prefetch_related(
                "assignees", "labels", "issue_module__module", "issue_cycle__cycle"
            )
        )

        workspace = Workspace.objects.get(slug=self._workspace_slug)
        start_date = workspace.created_at.date().replace(day=1)

        # Apply date range filter if available
        if self.filters["chart_period_range"]:
            start_date, end_date = self.filters["chart_period_range"]
            queryset = queryset.filter(
                created_at__date__gte=start_date, created_at__date__lte=end_date
            )

        # Annotate by month and count
        monthly_stats = (
            queryset.annotate(month=TruncMonth("created_at"))
            .values("month")
            .annotate(
                created_count=Count("id"),
                completed_count=Count("id", filter=Q(state__group="completed")),
            )
            .order_by("month")
        )

        # Create dictionary of month -> counts
        stats_dict = {
            stat["month"].strftime("%Y-%m-%d"): {
                "created_count": stat["created_count"],
                "completed_count": stat["completed_count"],
            }
            for stat in monthly_stats
        }

        # Generate monthly data (ensure months with 0 count are included)
        data = []
        # include the current date at the end
        end_date = timezone.now().date()
        last_month = end_date.replace(day=1)
        current_month = start_date

        while current_month <= last_month:
            date_str = current_month.strftime("%Y-%m-%d")
            stats = stats_dict.get(date_str, {"created_count": 0, "completed_count": 0})
            data.append(
                {
                    "key": date_str,
                    "name": date_str,
                    "count": stats["created_count"],
                    "completed_issues": stats["completed_count"],
                    "created_issues": stats["created_count"],
                }
            )
            # Move to next month
            if current_month.month == 12:
                current_month = current_month.replace(
                    year=current_month.year + 1, month=1
                )
            else:
                current_month = current_month.replace(month=current_month.month + 1)

        schema = {
            "completed_issues": "completed_issues",
            "created_issues": "created_issues",
        }

        return {"data": data, "schema": schema}

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE")
    def get(self, request: HttpRequest, slug: str) -> Response:
        self.initialize_workspace(slug, type="chart")
        type = request.GET.get("type", "projects")
        group_by = request.GET.get("group_by", None)
        x_axis = request.GET.get("x_axis", "PRIORITY")

        if type == "projects":
            return Response(self.project_chart(), status=status.HTTP_200_OK)

        elif type == "custom-work-items":
            queryset = (
                Issue.issue_objects.filter(**self.filters["base_filters"])
                .select_related("workspace", "state", "parent")
                .prefetch_related(
                    "assignees", "labels", "issue_module__module", "issue_cycle__cycle"
                )
            )

            # Apply date range filter if available
            if self.filters["chart_period_range"]:
                start_date, end_date = self.filters["chart_period_range"]
                queryset = queryset.filter(
                    created_at__date__gte=start_date, created_at__date__lte=end_date
                )

            return Response(
                build_analytics_chart(queryset, x_axis, group_by),
                status=status.HTTP_200_OK,
            )

        elif type == "work-items":
            return Response(
                self.work_item_completion_chart(),
                status=status.HTTP_200_OK,
            )

        return Response({"message": "Invalid type"}, status=status.HTTP_400_BAD_REQUEST)
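The monthly loop in `work_item_completion_chart` steps through calendar months without pulling in `dateutil`, rolling the year over manually at December. The same technique in isolation:

```python
from datetime import date


def iter_months(start: date, end: date):
    """Yield the first of every month from start's month through end's month."""
    current = start.replace(day=1)
    last = end.replace(day=1)
    while current <= last:
        yield current
        if current.month == 12:  # roll the year at December
            current = current.replace(year=current.year + 1, month=1)
        else:
            current = current.replace(month=current.month + 1)


print(list(iter_months(date(2024, 11, 3), date(2025, 2, 10))))
# [date(2024, 11, 1), date(2024, 12, 1), date(2025, 1, 1), date(2025, 2, 1)]
```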
421
apiserver/plane/app/views/analytic/project_analytics.py
Normal file
421
apiserver/plane/app/views/analytic/project_analytics.py
Normal file
@@ -0,0 +1,421 @@
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
from typing import Dict, Any
|
||||
from django.db.models import QuerySet, Q, Count
|
||||
from django.http import HttpRequest
|
||||
from django.db.models.functions import TruncMonth
|
||||
from django.utils import timezone
|
||||
from datetime import timedelta
|
||||
from plane.app.views.base import BaseAPIView
|
||||
from plane.app.permissions import ROLE, allow_permission
|
||||
from plane.db.models import (
|
||||
Project,
|
||||
Issue,
|
||||
Cycle,
|
||||
Module,
|
||||
CycleIssue,
|
||||
ModuleIssue,
|
||||
)
|
||||
from django.db import models
|
||||
from django.db.models import F, Case, When, Value
|
||||
from django.db.models.functions import Concat
|
||||
from plane.utils.build_chart import build_analytics_chart
|
||||
from plane.utils.date_utils import (
|
||||
get_analytics_filters,
|
||||
)
|
||||
|
||||
|
||||
class ProjectAdvanceAnalyticsBaseView(BaseAPIView):
|
||||
def initialize_workspace(self, slug: str, type: str) -> None:
|
||||
self._workspace_slug = slug
|
||||
self.filters = get_analytics_filters(
|
||||
slug=slug,
|
||||
type=type,
|
||||
user=self.request.user,
|
||||
date_filter=self.request.GET.get("date_filter", None),
|
||||
project_ids=self.request.GET.get("project_ids", None),
|
||||
)
|
||||
|
||||
|
||||
class ProjectAdvanceAnalyticsEndpoint(ProjectAdvanceAnalyticsBaseView):
|
||||
def get_filtered_counts(self, queryset: QuerySet) -> Dict[str, int]:
|
||||
def get_filtered_count() -> int:
|
||||
if self.filters["analytics_date_range"]:
|
||||
return queryset.filter(
|
||||
created_at__gte=self.filters["analytics_date_range"]["current"][
|
||||
"gte"
|
||||
],
|
||||
created_at__lte=self.filters["analytics_date_range"]["current"][
|
||||
"lte"
|
||||
],
|
||||
).count()
|
||||
return queryset.count()
|
||||
|
||||
return {
|
||||
"count": get_filtered_count(),
|
||||
}
|
||||
|
||||
def get_work_items_stats(
|
||||
self, project_id, cycle_id=None, module_id=None
|
||||
) -> Dict[str, Dict[str, int]]:
|
||||
"""
|
||||
Returns work item stats for the workspace, or filtered by cycle_id or module_id if provided.
|
||||
"""
|
||||
base_queryset = None
|
||||
if cycle_id is not None:
|
||||
cycle_issues = CycleIssue.objects.filter(
|
||||
**self.filters["base_filters"], cycle_id=cycle_id
|
||||
).values_list("issue_id", flat=True)
|
||||
base_queryset = Issue.issue_objects.filter(id__in=cycle_issues)
|
||||
elif module_id is not None:
|
||||
module_issues = ModuleIssue.objects.filter(
|
||||
**self.filters["base_filters"], module_id=module_id
|
||||
).values_list("issue_id", flat=True)
|
||||
base_queryset = Issue.issue_objects.filter(id__in=module_issues)
|
||||
else:
|
||||
base_queryset = Issue.issue_objects.filter(
|
||||
**self.filters["base_filters"], project_id=project_id
|
||||
)
|
||||
|
||||
return {
|
||||
"total_work_items": self.get_filtered_counts(base_queryset),
|
||||
"started_work_items": self.get_filtered_counts(
|
||||
base_queryset.filter(state__group="started")
|
||||
),
|
||||
"backlog_work_items": self.get_filtered_counts(
|
||||
base_queryset.filter(state__group="backlog")
|
||||
),
|
||||
"un_started_work_items": self.get_filtered_counts(
|
||||
base_queryset.filter(state__group="unstarted")
|
||||
),
|
||||
"completed_work_items": self.get_filtered_counts(
|
||||
base_queryset.filter(state__group="completed")
|
||||
),
|
||||
}
|
||||
|
||||
@allow_permission([ROLE.ADMIN, ROLE.MEMBER])
|
||||
def get(self, request: HttpRequest, slug: str, project_id: str) -> Response:
|
||||
self.initialize_workspace(slug, type="analytics")
|
||||
|
||||
# Optionally accept cycle_id or module_id as query params
|
||||
cycle_id = request.GET.get("cycle_id", None)
|
||||
module_id = request.GET.get("module_id", None)
|
||||
return Response(
|
||||
self.get_work_items_stats(
|
||||
cycle_id=cycle_id, module_id=module_id, project_id=project_id
|
||||
),
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
|
||||
class ProjectAdvanceAnalyticsStatsEndpoint(ProjectAdvanceAnalyticsBaseView):
|
||||
def get_project_issues_stats(self) -> QuerySet:
|
||||
# Get the base queryset with workspace and project filters
|
||||
base_queryset = Issue.issue_objects.filter(**self.filters["base_filters"])
|
||||
|
||||
# Apply date range filter if available
|
||||
if self.filters["chart_period_range"]:
|
||||
start_date, end_date = self.filters["chart_period_range"]
|
||||
base_queryset = base_queryset.filter(
|
||||
created_at__date__gte=start_date, created_at__date__lte=end_date
|
||||
)
|
||||
|
||||
return (
|
||||
base_queryset.values("project_id", "project__name")
|
||||
.annotate(
|
||||
cancelled_work_items=Count("id", filter=Q(state__group="cancelled")),
|
||||
completed_work_items=Count("id", filter=Q(state__group="completed")),
|
||||
backlog_work_items=Count("id", filter=Q(state__group="backlog")),
|
||||
un_started_work_items=Count("id", filter=Q(state__group="unstarted")),
|
||||
started_work_items=Count("id", filter=Q(state__group="started")),
|
||||
)
|
||||
.order_by("project_id")
|
||||
)
|
||||
|
||||
def get_work_items_stats(
|
||||
self, project_id, cycle_id=None, module_id=None
|
||||
) -> Dict[str, Dict[str, int]]:
|
||||
base_queryset = None
|
||||
if cycle_id is not None:
|
||||
cycle_issues = CycleIssue.objects.filter(
|
||||
**self.filters["base_filters"], cycle_id=cycle_id
|
||||
).values_list("issue_id", flat=True)
|
||||
base_queryset = Issue.issue_objects.filter(id__in=cycle_issues)
|
||||
elif module_id is not None:
|
||||
module_issues = ModuleIssue.objects.filter(
|
||||
**self.filters["base_filters"], module_id=module_id
|
||||
).values_list("issue_id", flat=True)
|
||||
base_queryset = Issue.issue_objects.filter(id__in=module_issues)
|
||||
else:
|
||||
base_queryset = Issue.issue_objects.filter(
|
||||
**self.filters["base_filters"], project_id=project_id
|
||||
)
|
||||
return (
|
||||
base_queryset.annotate(display_name=F("assignees__display_name"))
|
||||
.annotate(assignee_id=F("assignees__id"))
|
||||
.annotate(avatar=F("assignees__avatar"))
|
||||
.annotate(
|
||||
avatar_url=Case(
|
||||
# If `avatar_asset` exists, use it to generate the asset URL
|
||||
When(
|
||||
assignees__avatar_asset__isnull=False,
|
||||
then=Concat(
|
||||
Value("/api/assets/v2/static/"),
|
||||
"assignees__avatar_asset", # Assuming avatar_asset has an id or relevant field
|
||||
Value("/"),
|
||||
),
|
||||
),
|
||||
# If `avatar_asset` is None, fall back to using `avatar` field directly
|
||||
When(
|
||||
assignees__avatar_asset__isnull=True, then="assignees__avatar"
|
||||
),
|
||||
default=Value(None),
|
||||
output_field=models.CharField(),
|
||||
)
|
||||
)
|
||||
.values("display_name", "assignee_id", "avatar_url")
|
||||
.annotate(
|
||||
cancelled_work_items=Count(
|
||||
"id", filter=Q(state__group="cancelled"), distinct=True
|
||||
),
|
||||
completed_work_items=Count(
|
||||
"id", filter=Q(state__group="completed"), distinct=True
|
||||
),
|
||||
backlog_work_items=Count(
|
||||
"id", filter=Q(state__group="backlog"), distinct=True
|
||||
),
|
||||
un_started_work_items=Count(
|
||||
"id", filter=Q(state__group="unstarted"), distinct=True
|
||||
),
|
||||
started_work_items=Count(
|
||||
"id", filter=Q(state__group="started"), distinct=True
|
||||
),
|
||||
)
|
||||
.order_by("display_name")
|
||||
)
|
||||
|
||||
@allow_permission([ROLE.ADMIN, ROLE.MEMBER])
|
||||
def get(self, request: HttpRequest, slug: str, project_id: str) -> Response:
|
||||
self.initialize_workspace(slug, type="chart")
|
||||
type = request.GET.get("type", "work-items")
|
||||
|
||||
if type == "work-items":
|
||||
# Optionally accept cycle_id or module_id as query params
|
||||
cycle_id = request.GET.get("cycle_id", None)
|
||||
module_id = request.GET.get("module_id", None)
|
||||
return Response(
|
||||
self.get_work_items_stats(
|
||||
project_id=project_id, cycle_id=cycle_id, module_id=module_id
|
||||
),
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
return Response({"message": "Invalid type"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
|
||||
class ProjectAdvanceAnalyticsChartEndpoint(ProjectAdvanceAnalyticsBaseView):
|
||||
def work_item_completion_chart(
|
||||
self, project_id, cycle_id=None, module_id=None
|
||||
) -> Dict[str, Any]:
|
||||
# Get the base queryset
|
||||
queryset = (
|
||||
Issue.issue_objects.filter(**self.filters["base_filters"])
|
||||
.filter(project_id=project_id)
|
||||
.select_related("workspace", "state", "parent")
|
||||
.prefetch_related(
|
||||
"assignees", "labels", "issue_module__module", "issue_cycle__cycle"
|
||||
)
|
||||
)
|
||||
|
||||
if cycle_id is not None:
|
||||
cycle_issues = CycleIssue.objects.filter(
|
||||
**self.filters["base_filters"], cycle_id=cycle_id
|
||||
).values_list("issue_id", flat=True)
|
||||
cycle = Cycle.objects.filter(id=cycle_id).first()
|
||||
if cycle and cycle.start_date:
|
||||
start_date = cycle.start_date.date()
|
||||
end_date = cycle.end_date.date()
|
||||
else:
|
||||
return {"data": [], "schema": {}}
|
||||
queryset = cycle_issues
|
||||
|
||||
elif module_id is not None:
|
||||
module_issues = ModuleIssue.objects.filter(
|
||||
**self.filters["base_filters"], module_id=module_id
|
||||
).values_list("issue_id", flat=True)
|
||||
module = Module.objects.filter(id=module_id).first()
|
||||
if module and module.start_date:
|
||||
start_date = module.start_date
|
||||
end_date = module.target_date
|
||||
else:
|
||||
return {"data": [], "schema": {}}
|
||||
queryset = module_issues
|
||||
|
||||
else:
|
||||
project = Project.objects.filter(id=project_id).first()
|
||||
if project.created_at:
|
||||
start_date = project.created_at.date().replace(day=1)
|
||||
else:
|
||||
return {"data": [], "schema": {}}
|
||||
|
||||
if cycle_id or module_id:
|
||||
# Get daily stats with optimized query
|
||||
daily_stats = (
|
||||
queryset.values("created_at__date")
|
||||
.annotate(
|
||||
created_count=Count("id"),
|
||||
completed_count=Count(
|
||||
"id", filter=Q(issue__state__group="completed")
|
||||
),
|
||||
)
|
||||
.order_by("created_at__date")
|
||||
)
|
||||
|
||||
# Create a dictionary of existing stats with summed counts
|
||||
stats_dict = {
|
||||
stat["created_at__date"].strftime("%Y-%m-%d"): {
|
||||
"created_count": stat["created_count"],
|
||||
"completed_count": stat["completed_count"],
|
||||
}
|
||||
for stat in daily_stats
|
||||
}
|
||||
|
||||
# Generate data for all days in the range
|
||||
data = []
|
||||
current_date = start_date
|
||||
while current_date <= end_date:
|
||||
date_str = current_date.strftime("%Y-%m-%d")
|
||||
stats = stats_dict.get(
|
||||
date_str, {"created_count": 0, "completed_count": 0}
|
||||
)
|
||||
data.append(
|
||||
{
|
||||
"key": date_str,
|
||||
"name": date_str,
|
||||
"count": stats["created_count"] + stats["completed_count"],
|
||||
"completed_issues": stats["completed_count"],
|
||||
"created_issues": stats["created_count"],
|
||||
}
|
||||
)
|
||||
current_date += timedelta(days=1)
|
||||
else:
|
||||
# Apply date range filter if available
|
||||
if self.filters["chart_period_range"]:
|
||||
start_date, end_date = self.filters["chart_period_range"]
|
||||
queryset = queryset.filter(
|
||||
created_at__date__gte=start_date, created_at__date__lte=end_date
|
||||
)
|
||||
|
||||
# Annotate by month and count
|
||||
monthly_stats = (
|
||||
queryset.annotate(month=TruncMonth("created_at"))
|
||||
.values("month")
|
||||
.annotate(
|
||||
created_count=Count("id"),
|
||||
completed_count=Count("id", filter=Q(state__group="completed")),
|
||||
)
|
||||
.order_by("month")
|
||||
)
|
||||
|
||||
# Create dictionary of month -> counts
|
||||
stats_dict = {
|
||||
stat["month"].strftime("%Y-%m-%d"): {
|
||||
"created_count": stat["created_count"],
|
||||
"completed_count": stat["completed_count"],
|
||||
}
|
||||
for stat in monthly_stats
|
||||
}
|
||||
|
||||
# Generate monthly data (ensure months with 0 count are included)
|
||||
data = []
|
||||
# include the current date at the end
|
||||
end_date = timezone.now().date()
|
||||
last_month = end_date.replace(day=1)
|
||||
current_month = start_date
|
||||
|
||||
while current_month <= last_month:
|
||||
date_str = current_month.strftime("%Y-%m-%d")
|
||||
stats = stats_dict.get(
|
||||
date_str, {"created_count": 0, "completed_count": 0}
|
||||
)
|
||||
data.append(
|
||||
{
|
||||
"key": date_str,
|
||||
"name": date_str,
|
||||
"count": stats["created_count"],
|
||||
"completed_issues": stats["completed_count"],
|
||||
"created_issues": stats["created_count"],
|
||||
}
|
||||
)
|
||||
# Move to next month
|
||||
if current_month.month == 12:
|
||||
current_month = current_month.replace(
|
||||
year=current_month.year + 1, month=1
|
||||
)
|
||||
else:
|
||||
current_month = current_month.replace(month=current_month.month + 1)
|
||||
|
||||
schema = {
|
||||
"completed_issues": "completed_issues",
|
||||
"created_issues": "created_issues",
|
||||
}
|
||||
|
||||
return {"data": data, "schema": schema}
|
||||
|
||||
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
|
||||
def get(self, request: HttpRequest, slug: str, project_id: str) -> Response:
|
||||
self.initialize_workspace(slug, type="chart")
|
||||
type = request.GET.get("type", "projects")
|
||||
group_by = request.GET.get("group_by", None)
|
||||
x_axis = request.GET.get("x_axis", "PRIORITY")
|
||||
cycle_id = request.GET.get("cycle_id", None)
|
||||
module_id = request.GET.get("module_id", None)
|
||||
|
||||
if type == "custom-work-items":
|
||||
queryset = (
|
||||
Issue.issue_objects.filter(**self.filters["base_filters"])
|
||||
.filter(project_id=project_id)
|
||||
.select_related("workspace", "state", "parent")
|
||||
.prefetch_related(
|
||||
"assignees", "labels", "issue_module__module", "issue_cycle__cycle"
|
||||
)
|
||||
)
|
||||
|
||||
# Apply cycle/module filters if present
|
||||
if cycle_id is not None:
|
||||
cycle_issues = CycleIssue.objects.filter(
|
||||
**self.filters["base_filters"], cycle_id=cycle_id
|
||||
).values_list("issue_id", flat=True)
|
||||
queryset = queryset.filter(id__in=cycle_issues)
|
||||
|
||||
elif module_id is not None:
|
||||
module_issues = ModuleIssue.objects.filter(
|
||||
**self.filters["base_filters"], module_id=module_id
|
||||
).values_list("issue_id", flat=True)
|
||||
queryset = queryset.filter(id__in=module_issues)
|
||||
|
||||
# Apply date range filter if available
|
||||
if self.filters["chart_period_range"]:
|
||||
start_date, end_date = self.filters["chart_period_range"]
|
||||
queryset = queryset.filter(
|
||||
created_at__date__gte=start_date, created_at__date__lte=end_date
|
||||
)
|
||||
|
||||
return Response(
|
||||
build_analytics_chart(queryset, x_axis, group_by),
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
elif type == "work-items":
|
||||
# Optionally accept cycle_id or module_id as query params
|
||||
cycle_id = request.GET.get("cycle_id", None)
|
||||
module_id = request.GET.get("module_id", None)
|
||||
|
||||
return Response(
|
||||
self.work_item_completion_chart(
|
||||
project_id=project_id, cycle_id=cycle_id, module_id=module_id
|
||||
),
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
return Response({"message": "Invalid type"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
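A quick sanity check of the month-advance step used in the loop above, as a minimal standalone sketch; the dates are illustrative and this helper is not part of the endpoint:

from datetime import date

def next_month(current: date) -> date:
    # Mirrors the branch above: December wraps to January of the following year.
    if current.month == 12:
        return current.replace(year=current.year + 1, month=1)
    return current.replace(month=current.month + 1)

assert next_month(date(2024, 12, 1)) == date(2025, 1, 1)
assert next_month(date(2024, 3, 1)) == date(2024, 4, 1)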
@@ -707,3 +707,14 @@ class ProjectBulkAssetEndpoint(BaseAPIView):
            pass

        return Response(status=status.HTTP_204_NO_CONTENT)


+class AssetCheckEndpoint(BaseAPIView):
+    """Endpoint to check if an asset exists."""
+
+    @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST], level="WORKSPACE")
+    def get(self, request, slug, asset_id):
+        asset = FileAsset.all_objects.filter(
+            id=asset_id, workspace__slug=slug, deleted_at__isnull=True
+        ).exists()
+        return Response({"exists": asset}, status=status.HTTP_200_OK)
@@ -1119,14 +1119,13 @@ class CycleUserPropertiesEndpoint(BaseAPIView):
class CycleProgressEndpoint(BaseAPIView):
    @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
    def get(self, request, slug, project_id, cycle_id):

        cycle = Cycle.objects.filter(
            workspace__slug=slug, project_id=project_id, id=cycle_id
        ).first()
        if not cycle:
            return Response(
                {"error": "Cycle not found"}, status=status.HTTP_404_NOT_FOUND
            )
        aggregate_estimates = (
            Issue.issue_objects.filter(
                estimate_point__estimate__type="points",
@@ -1177,7 +1176,7 @@ class CycleProgressEndpoint(BaseAPIView):
                ),
            )
        )
        if cycle.progress_snapshot:
            backlog_issues = cycle.progress_snapshot.get("backlog_issues", 0)
            unstarted_issues = cycle.progress_snapshot.get("unstarted_issues", 0)
            started_issues = cycle.progress_snapshot.get("started_issues", 0)
@@ -29,6 +29,7 @@ from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPagina
from plane.app.permissions import allow_permission, ROLE
+from plane.utils.host import base_host


class CycleIssueViewSet(BaseViewSet):
    serializer_class = CycleIssueSerializer
    model = CycleIssue
apiserver/plane/app/views/external/base.py
@@ -11,8 +11,7 @@ from rest_framework.response import Response

# Module import
from plane.app.permissions import ROLE, allow_permission
-from plane.app.serializers import (ProjectLiteSerializer,
-                                   WorkspaceLiteSerializer)
+from plane.app.serializers import ProjectLiteSerializer, WorkspaceLiteSerializer
from plane.db.models import Project, Workspace
from plane.license.utils.instance_value import get_configuration_value
from plane.utils.exception_logger import log_exception
@@ -22,6 +21,7 @@ from ..base import BaseAPIView

class LLMProvider:
    """Base class for LLM provider configurations"""

    name: str = ""
    models: List[str] = []
    default_model: str = ""
@@ -34,11 +34,13 @@ class LLMProvider:
            "default_model": cls.default_model,
        }


class OpenAIProvider(LLMProvider):
    name = "OpenAI"
    models = ["gpt-3.5-turbo", "gpt-4o-mini", "gpt-4o", "o1-mini", "o1-preview"]
    default_model = "gpt-4o-mini"


class AnthropicProvider(LLMProvider):
    name = "Anthropic"
    models = [
@@ -49,40 +51,45 @@ class AnthropicProvider(LLMProvider):
        "claude-2.1",
        "claude-2",
        "claude-instant-1.2",
-        "claude-instant-1"
+        "claude-instant-1",
    ]
    default_model = "claude-3-sonnet-20240229"


class GeminiProvider(LLMProvider):
    name = "Gemini"
    models = ["gemini-pro", "gemini-1.5-pro-latest", "gemini-pro-vision"]
    default_model = "gemini-pro"


SUPPORTED_PROVIDERS = {
    "openai": OpenAIProvider,
    "anthropic": AnthropicProvider,
    "gemini": GeminiProvider,
}


def get_llm_config() -> Tuple[str | None, str | None, str | None]:
    """
    Helper to get LLM configuration values, returns:
    - api_key, model, provider
    """
-    api_key, provider_key, model = get_configuration_value([
-        {
-            "key": "LLM_API_KEY",
-            "default": os.environ.get("LLM_API_KEY", None),
-        },
-        {
-            "key": "LLM_PROVIDER",
-            "default": os.environ.get("LLM_PROVIDER", "openai"),
-        },
-        {
-            "key": "LLM_MODEL",
-            "default": os.environ.get("LLM_MODEL", None),
-        },
-    ])
+    api_key, provider_key, model = get_configuration_value(
+        [
+            {
+                "key": "LLM_API_KEY",
+                "default": os.environ.get("LLM_API_KEY", None),
+            },
+            {
+                "key": "LLM_PROVIDER",
+                "default": os.environ.get("LLM_PROVIDER", "openai"),
+            },
+            {
+                "key": "LLM_MODEL",
+                "default": os.environ.get("LLM_MODEL", None),
+            },
+        ]
+    )

    provider = SUPPORTED_PROVIDERS.get(provider_key.lower())
    if not provider:
@@ -99,16 +106,20 @@ def get_llm_config() -> Tuple[str | None, str | None, str | None]:

    # Validate model is supported by provider
    if model not in provider.models:
-        log_exception(ValueError(
-            f"Model {model} not supported by {provider.name}. "
-            f"Supported models: {', '.join(provider.models)}"
-        ))
+        log_exception(
+            ValueError(
+                f"Model {model} not supported by {provider.name}. "
+                f"Supported models: {', '.join(provider.models)}"
+            )
+        )
        return None, None, None

    return api_key, model, provider_key


-def get_llm_response(task, prompt, api_key: str, model: str, provider: str) -> Tuple[str | None, str | None]:
+def get_llm_response(
+    task, prompt, api_key: str, model: str, provider: str
+) -> Tuple[str | None, str | None]:
    """Helper to get LLM completion response"""
    final_text = task + "\n" + prompt
    try:
@@ -118,10 +129,7 @@ def get_llm_response(task, prompt, api_key: str, model: str, provider: str) -> T

        client = OpenAI(api_key=api_key)
        chat_completion = client.chat.completions.create(
-            model=model,
-            messages=[
-                {"role": "user", "content": final_text}
-            ]
+            model=model, messages=[{"role": "user", "content": final_text}]
        )
        text = chat_completion.choices[0].message.content
        return text, None
@@ -135,6 +143,7 @@ def get_llm_response(task, prompt, api_key: str, model: str, provider: str) -> T
    else:
        return None, f"Error occurred while generating response from {provider}"


class GPTIntegrationEndpoint(BaseAPIView):
    @allow_permission([ROLE.ADMIN, ROLE.MEMBER])
    def post(self, request, slug, project_id):
@@ -152,7 +161,9 @@ class GPTIntegrationEndpoint(BaseAPIView):
                {"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST
            )

-        text, error = get_llm_response(task, request.data.get("prompt", False), api_key, model, provider)
+        text, error = get_llm_response(
+            task, request.data.get("prompt", False), api_key, model, provider
+        )
        if not text and error:
            return Response(
                {"error": "An internal error has occurred."},
@@ -190,7 +201,9 @@ class WorkspaceGPTIntegrationEndpoint(BaseAPIView):
                {"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST
            )

-        text, error = get_llm_response(task, request.data.get("prompt", False), api_key, model, provider)
+        text, error = get_llm_response(
+            task, request.data.get("prompt", False), api_key, model, provider
+        )
        if not text and error:
            return Response(
                {"error": "An internal error has occurred."},
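A hedged usage sketch of the two helpers above, assuming the LLM_* configuration keys are set; the task and prompt strings are invented for illustration:

api_key, model, provider = get_llm_config()
if api_key and model and provider:
    text, error = get_llm_response(
        "Summarize this work item",
        "Login fails when the password contains a quote.",
        api_key,
        model,
        provider,
    )
    print(error or text)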
@@ -38,6 +38,7 @@ from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPagina
from plane.app.permissions import allow_permission, ROLE
from plane.utils.error_codes import ERROR_CODES
+from plane.utils.host import base_host

# Module imports
from .. import BaseViewSet, BaseAPIView

@@ -23,6 +23,7 @@ from plane.settings.storage import S3Storage
from plane.bgtasks.storage_metadata_task import get_asset_object_metadata
+from plane.utils.host import base_host


class IssueAttachmentEndpoint(BaseAPIView):
    serializer_class = IssueAttachmentSerializer
    model = FileAsset
@@ -19,6 +19,7 @@ from plane.db.models import IssueComment, ProjectMember, CommentReaction, Projec
from plane.bgtasks.issue_activities_task import issue_activity
+from plane.utils.host import base_host


class IssueCommentViewSet(BaseViewSet):
    serializer_class = IssueCommentSerializer
    model = IssueComment
@@ -15,8 +15,10 @@ from plane.app.serializers import IssueLinkSerializer
from plane.app.permissions import ProjectEntityPermission
from plane.db.models import IssueLink
from plane.bgtasks.issue_activities_task import issue_activity
+from plane.bgtasks.work_item_link_task import crawl_work_item_link_title
+from plane.utils.host import base_host


class IssueLinkViewSet(BaseViewSet):
    permission_classes = [ProjectEntityPermission]

@@ -43,6 +45,9 @@ class IssueLinkViewSet(BaseViewSet):
        serializer = IssueLinkSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save(project_id=project_id, issue_id=issue_id)
+            crawl_work_item_link_title.delay(
+                serializer.data.get("id"), serializer.data.get("url")
+            )
            issue_activity.delay(
                type="link.activity.created",
                requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
@@ -54,6 +59,10 @@ class IssueLinkViewSet(BaseViewSet):
                notification=True,
                origin=base_host(request=request, is_app=True),
            )
+
+            issue_link = self.get_queryset().get(id=serializer.data.get("id"))
+            serializer = IssueLinkSerializer(issue_link)
+
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

@@ -65,9 +74,14 @@ class IssueLinkViewSet(BaseViewSet):
        current_instance = json.dumps(
            IssueLinkSerializer(issue_link).data, cls=DjangoJSONEncoder
        )

        serializer = IssueLinkSerializer(issue_link, data=request.data, partial=True)
        if serializer.is_valid():
            serializer.save()
+            crawl_work_item_link_title.delay(
+                serializer.data.get("id"), serializer.data.get("url")
+            )

            issue_activity.delay(
                type="link.activity.updated",
                requested_data=requested_data,
@@ -79,6 +93,9 @@ class IssueLinkViewSet(BaseViewSet):
                notification=True,
                origin=base_host(request=request, is_app=True),
            )
+            issue_link = self.get_queryset().get(id=serializer.data.get("id"))
+            serializer = IssueLinkSerializer(issue_link)
+
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

@@ -17,6 +17,7 @@ from plane.db.models import IssueReaction
from plane.bgtasks.issue_activities_task import issue_activity
+from plane.utils.host import base_host


class IssueReactionViewSet(BaseViewSet):
    serializer_class = IssueReactionSerializer
    model = IssueReaction
@@ -29,6 +29,7 @@ from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.issue_relation_mapper import get_actual_relation
+from plane.utils.host import base_host


class IssueRelationViewSet(BaseViewSet):
    serializer_class = IssueRelationSerializer
    model = IssueRelation
@@ -25,6 +25,7 @@ from collections import defaultdict
+from plane.utils.host import base_host
from plane.utils.order_queryset import order_issue_queryset


class SubIssuesEndpoint(BaseAPIView):
    permission_classes = [ProjectEntityPermission]

@@ -63,6 +63,7 @@ from .. import BaseAPIView, BaseViewSet
from plane.bgtasks.recent_visited_task import recent_visited_task
+from plane.utils.host import base_host


class ModuleViewSet(BaseViewSet):
    model = Module
    webhook_event = "module"
@@ -36,6 +36,7 @@ from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPagina
from .. import BaseViewSet
+from plane.utils.host import base_host


class ModuleIssueViewSet(BaseViewSet):
    serializer_class = ModuleIssueSerializer
    model = ModuleIssue
@@ -280,7 +281,11 @@ class ModuleIssueViewSet(BaseViewSet):
            issue_id=str(issue_id),
            project_id=str(project_id),
            current_instance=json.dumps(
-                {"module_name": module_issue.first().module.name if (module_issue.first() and module_issue.first().module) else None}
+                {
+                    "module_name": module_issue.first().module.name
+                    if (module_issue.first() and module_issue.first().module)
+                    else None
+                }
            ),
            epoch=int(timezone.now().timestamp()),
            notification=True,

@@ -42,6 +42,7 @@ from plane.bgtasks.page_version_task import page_version
from plane.bgtasks.recent_visited_task import recent_visited_task
+from plane.bgtasks.copy_s3_object import copy_s3_objects


def unarchive_archive_page_and_descendants(page_id, archived_at):
    # Your SQL query
    sql = """
@@ -198,7 +199,7 @@ class PageViewSet(BaseViewSet):
        project = Project.objects.get(pk=project_id)

        """
        if the role is guest and guest_view_all_features is false and owned by is not
        the requesting user then dont show the page
        """

@@ -572,6 +573,12 @@ class PageDuplicateEndpoint(BaseAPIView):
            pk=page_id, workspace__slug=slug, projects__id=project_id
        ).first()

+        # check for permission
+        if page.access == Page.PRIVATE_ACCESS and page.owned_by_id != request.user.id:
+            return Response(
+                {"error": "Permission denied"}, status=status.HTTP_403_FORBIDDEN
+            )
+
        # get all the project ids where page is present
        project_ids = ProjectPage.objects.filter(page_id=page_id).values_list(
            "project_id", flat=True

@@ -445,7 +445,7 @@ class ProjectViewSet(BaseViewSet):
                is_active=True,
            ).exists()
        ):
-            project = Project.objects.get(pk=pk)
+            project = Project.objects.get(pk=pk, workspace__slug=slug)
            project.delete()
            webhook_activity.delay(
                event="project",

@@ -29,6 +29,7 @@ from plane.db.models import (
from plane.db.models.project import ProjectNetwork
+from plane.utils.host import base_host


class ProjectInvitationsViewset(BaseViewSet):
    serializer_class = ProjectMemberInviteSerializer
    model = ProjectMemberInvite
@@ -168,6 +168,8 @@ class ProjectMemberViewSet(BaseViewSet):
            workspace__slug=slug,
            member__is_bot=False,
            is_active=True,
+            member__member_workspace__workspace__slug=slug,
+            member__member_workspace__is_active=True,
        ).select_related("project", "member", "workspace")

        serializer = ProjectMemberRoleSerializer(
@@ -313,7 +315,11 @@ class UserProjectRolesEndpoint(BaseAPIView):

    def get(self, request, slug):
        project_members = ProjectMember.objects.filter(
-            workspace__slug=slug, member_id=request.user.id, is_active=True
+            workspace__slug=slug,
+            member_id=request.user.id,
+            is_active=True,
+            member__member_workspace__workspace__slug=slug,
+            member__member_workspace__is_active=True,
        ).values("project_id", "role")

        project_members = {

@@ -1,5 +1,5 @@
# Django imports
-from django.db.models import Q
+from django.db.models import Q, QuerySet

# Third party imports
from rest_framework import status
@@ -12,6 +12,95 @@ from plane.utils.issue_search import search_issues


class IssueSearchEndpoint(BaseAPIView):
+    def filter_issues_by_project(self, project_id: int, issues: QuerySet) -> QuerySet:
+        """
+        Filter issues by project
+        """
+
+        issues = issues.filter(project_id=project_id)
+
+        return issues
+
+    def search_issues_by_query(self, query: str, issues: QuerySet) -> QuerySet:
+        """
+        Search issues by query
+        """
+
+        issues = search_issues(query, issues)
+
+        return issues
+
+    def search_issues_and_excluding_parent(
+        self, issues: QuerySet, issue_id: str
+    ) -> QuerySet:
+        """
+        Search issues and epics by query excluding the parent
+        """
+
+        issue = Issue.issue_objects.filter(pk=issue_id).first()
+        if issue:
+            issues = issues.filter(
+                ~Q(pk=issue_id), ~Q(pk=issue.parent_id), ~Q(parent_id=issue_id)
+            )
+        return issues
+
+    def filter_issues_excluding_related_issues(
+        self, issue_id: str, issues: QuerySet
+    ) -> QuerySet:
+        """
+        Filter issues excluding related issues
+        """
+
+        issue = Issue.issue_objects.filter(pk=issue_id).first()
+        related_issue_ids = (
+            IssueRelation.objects.filter(Q(related_issue=issue) | Q(issue=issue))
+            .values_list("issue_id", "related_issue_id")
+            .distinct()
+        )
+
+        related_issue_ids = [item for sublist in related_issue_ids for item in sublist]
+
+        if issue:
+            issues = issues.filter(~Q(pk=issue_id), ~Q(pk__in=related_issue_ids))
+
+        return issues
+
+    def filter_root_issues_only(self, issue_id: str, issues: QuerySet) -> QuerySet:
+        """
+        Filter root issues only
+        """
+        issue = Issue.issue_objects.filter(pk=issue_id).first()
+        if issue:
+            issues = issues.filter(~Q(pk=issue_id), parent__isnull=True)
+            if issue.parent:
+                issues = issues.filter(~Q(pk=issue.parent_id))
+        return issues
+
+    def exclude_issues_in_cycles(self, issues: QuerySet) -> QuerySet:
+        """
+        Exclude issues in cycles
+        """
+        issues = issues.exclude(
+            Q(issue_cycle__isnull=False) & Q(issue_cycle__deleted_at__isnull=True)
+        )
+        return issues
+
+    def exclude_issues_in_module(self, issues: QuerySet, module: str) -> QuerySet:
+        """
+        Exclude issues in a module
+        """
+        issues = issues.exclude(
+            Q(issue_module__module=module) & Q(issue_module__deleted_at__isnull=True)
+        )
+        return issues
+
+    def filter_issues_without_target_date(self, issues: QuerySet) -> QuerySet:
+        """
+        Filter issues without a target date
+        """
+        issues = issues.filter(target_date__isnull=True)
+        return issues
+
    def get(self, request, slug, project_id):
        query = request.query_params.get("search", False)
        workspace_search = request.query_params.get("workspace_search", "false")
@@ -21,7 +110,6 @@ class IssueSearchEndpoint(BaseAPIView):
        module = request.query_params.get("module", False)
        sub_issue = request.query_params.get("sub_issue", "false")
        target_date = request.query_params.get("target_date", True)

        issue_id = request.query_params.get("issue_id", False)

        issues = Issue.issue_objects.filter(
@@ -32,52 +120,28 @@ class IssueSearchEndpoint(BaseAPIView):
        )

        if workspace_search == "false":
-            issues = issues.filter(project_id=project_id)
+            issues = self.filter_issues_by_project(project_id, issues)

        if query:
-            issues = search_issues(query, issues)
+            issues = self.search_issues_by_query(query, issues)

        if parent == "true" and issue_id:
-            issue = Issue.issue_objects.filter(pk=issue_id).first()
-            if issue:
-                issues = issues.filter(
-                    ~Q(pk=issue_id), ~Q(pk=issue.parent_id), ~Q(parent_id=issue_id)
-                )
+            issues = self.search_issues_and_excluding_parent(issues, issue_id)

        if issue_relation == "true" and issue_id:
-            issue = Issue.issue_objects.filter(pk=issue_id).first()
-            related_issue_ids = IssueRelation.objects.filter(
-                Q(related_issue=issue) | Q(issue=issue)
-            ).values_list(
-                "issue_id", "related_issue_id"
-            ).distinct()
-
-            related_issue_ids = [item for sublist in related_issue_ids for item in sublist]
-
-            if issue:
-                issues = issues.filter(
-                    ~Q(pk=issue_id),
-                    ~Q(pk__in=related_issue_ids),
-                )
+            issues = self.filter_issues_excluding_related_issues(issue_id, issues)

        if sub_issue == "true" and issue_id:
-            issue = Issue.issue_objects.filter(pk=issue_id).first()
-            if issue:
-                issues = issues.filter(~Q(pk=issue_id), parent__isnull=True)
-                if issue.parent:
-                    issues = issues.filter(~Q(pk=issue.parent_id))
+            issues = self.filter_root_issues_only(issue_id, issues)

        if cycle == "true":
-            issues = issues.exclude(
-                Q(issue_cycle__isnull=False) & Q(issue_cycle__deleted_at__isnull=True)
-            )
+            issues = self.exclude_issues_in_cycles(issues)

        if module:
-            issues = issues.exclude(
-                Q(issue_module__module=module)
-                & Q(issue_module__deleted_at__isnull=True)
-            )
+            issues = self.exclude_issues_in_module(issues, module)

        if target_date == "none":
-            issues = issues.filter(target_date__isnull=True)
+            issues = self.filter_issues_without_target_date(issues)

        if ProjectMember.objects.filter(
            project_id=project_id, member=self.request.user, is_active=True, role=5

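Each helper above takes and returns a QuerySet, so the view composes them lazily; nothing hits the database until the results are evaluated. A minimal sketch of the same composition pattern, with a hypothetical endpoint instance and starting queryset:

issues = Issue.issue_objects.all()
endpoint = IssueSearchEndpoint()
issues = endpoint.filter_issues_by_project(project_id=1, issues=issues)
issues = endpoint.search_issues_by_query("login bug", issues)
issues = endpoint.exclude_issues_in_cycles(issues)
results = list(issues[:25])  # the single SQL query runs here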
@@ -24,125 +24,152 @@ class TimezoneEndpoint(APIView):
    @method_decorator(cache_page(60 * 60 * 2))
    def get(self, request):
        timezone_locations = [
-            ('Midway Island', 'Pacific/Midway'),  # UTC-11:00
-            ('American Samoa', 'Pacific/Pago_Pago'),  # UTC-11:00
-            ('Hawaii', 'Pacific/Honolulu'),  # UTC-10:00
-            ('Aleutian Islands', 'America/Adak'),  # UTC-10:00 (DST: UTC-09:00)
-            ('Marquesas Islands', 'Pacific/Marquesas'),  # UTC-09:30
-            ('Alaska', 'America/Anchorage'),  # UTC-09:00 (DST: UTC-08:00)
-            ('Gambier Islands', 'Pacific/Gambier'),  # UTC-09:00
-            ('Pacific Time (US and Canada)', 'America/Los_Angeles'),  # UTC-08:00 (DST: UTC-07:00)
-            ('Baja California', 'America/Tijuana'),  # UTC-08:00 (DST: UTC-07:00)
-            ('Mountain Time (US and Canada)', 'America/Denver'),  # UTC-07:00 (DST: UTC-06:00)
-            ('Arizona', 'America/Phoenix'),  # UTC-07:00
-            ('Chihuahua, Mazatlan', 'America/Chihuahua'),  # UTC-07:00 (DST: UTC-06:00)
-            ('Central Time (US and Canada)', 'America/Chicago'),  # UTC-06:00 (DST: UTC-05:00)
-            ('Saskatchewan', 'America/Regina'),  # UTC-06:00
-            ('Guadalajara, Mexico City, Monterrey', 'America/Mexico_City'),  # UTC-06:00 (DST: UTC-05:00)
-            ('Tegucigalpa, Honduras', 'America/Tegucigalpa'),  # UTC-06:00
-            ('Costa Rica', 'America/Costa_Rica'),  # UTC-06:00
-            ('Eastern Time (US and Canada)', 'America/New_York'),  # UTC-05:00 (DST: UTC-04:00)
-            ('Lima', 'America/Lima'),  # UTC-05:00
-            ('Bogota', 'America/Bogota'),  # UTC-05:00
-            ('Quito', 'America/Guayaquil'),  # UTC-05:00
-            ('Chetumal', 'America/Cancun'),  # UTC-05:00 (DST: UTC-04:00)
-            ('Caracas (Old Venezuela Time)', 'America/Caracas'),  # UTC-04:30
-            ('Atlantic Time (Canada)', 'America/Halifax'),  # UTC-04:00 (DST: UTC-03:00)
-            ('Caracas', 'America/Caracas'),  # UTC-04:00
-            ('Santiago', 'America/Santiago'),  # UTC-04:00 (DST: UTC-03:00)
-            ('La Paz', 'America/La_Paz'),  # UTC-04:00
-            ('Manaus', 'America/Manaus'),  # UTC-04:00
-            ('Georgetown', 'America/Guyana'),  # UTC-04:00
-            ('Bermuda', 'Atlantic/Bermuda'),  # UTC-04:00 (DST: UTC-03:00)
-            ('Newfoundland Time (Canada)', 'America/St_Johns'),  # UTC-03:30 (DST: UTC-02:30)
-            ('Buenos Aires', 'America/Argentina/Buenos_Aires'),  # UTC-03:00
-            ('Brasilia', 'America/Sao_Paulo'),  # UTC-03:00
-            ('Greenland', 'America/Godthab'),  # UTC-03:00 (DST: UTC-02:00)
-            ('Montevideo', 'America/Montevideo'),  # UTC-03:00
-            ('Falkland Islands', 'Atlantic/Stanley'),  # UTC-03:00
-            ('South Georgia and the South Sandwich Islands', 'Atlantic/South_Georgia'),  # UTC-02:00
-            ('Azores', 'Atlantic/Azores'),  # UTC-01:00 (DST: UTC+00:00)
-            ('Cape Verde Islands', 'Atlantic/Cape_Verde'),  # UTC-01:00
-            ('Dublin', 'Europe/Dublin'),  # UTC+00:00 (DST: UTC+01:00)
-            ('Reykjavik', 'Atlantic/Reykjavik'),  # UTC+00:00
-            ('Lisbon', 'Europe/Lisbon'),  # UTC+00:00 (DST: UTC+01:00)
-            ('Monrovia', 'Africa/Monrovia'),  # UTC+00:00
-            ('Casablanca', 'Africa/Casablanca'),  # UTC+00:00 (DST: UTC+01:00)
-            ('Central European Time (Berlin, Rome, Paris)', 'Europe/Paris'),  # UTC+01:00 (DST: UTC+02:00)
-            ('West Central Africa', 'Africa/Lagos'),  # UTC+01:00
-            ('Algiers', 'Africa/Algiers'),  # UTC+01:00
-            ('Lagos', 'Africa/Lagos'),  # UTC+01:00
-            ('Tunis', 'Africa/Tunis'),  # UTC+01:00
-            ('Eastern European Time (Cairo, Helsinki, Kyiv)', 'Europe/Kiev'),  # UTC+02:00 (DST: UTC+03:00)
-            ('Athens', 'Europe/Athens'),  # UTC+02:00 (DST: UTC+03:00)
-            ('Jerusalem', 'Asia/Jerusalem'),  # UTC+02:00 (DST: UTC+03:00)
-            ('Johannesburg', 'Africa/Johannesburg'),  # UTC+02:00
-            ('Harare, Pretoria', 'Africa/Harare'),  # UTC+02:00
-            ('Moscow Time', 'Europe/Moscow'),  # UTC+03:00
-            ('Baghdad', 'Asia/Baghdad'),  # UTC+03:00
-            ('Nairobi', 'Africa/Nairobi'),  # UTC+03:00
-            ('Kuwait, Riyadh', 'Asia/Riyadh'),  # UTC+03:00
-            ('Tehran', 'Asia/Tehran'),  # UTC+03:30 (DST: UTC+04:30)
-            ('Abu Dhabi', 'Asia/Dubai'),  # UTC+04:00
-            ('Baku', 'Asia/Baku'),  # UTC+04:00 (DST: UTC+05:00)
-            ('Yerevan', 'Asia/Yerevan'),  # UTC+04:00 (DST: UTC+05:00)
-            ('Astrakhan', 'Europe/Astrakhan'),  # UTC+04:00
-            ('Tbilisi', 'Asia/Tbilisi'),  # UTC+04:00
-            ('Mauritius', 'Indian/Mauritius'),  # UTC+04:00
-            ('Islamabad', 'Asia/Karachi'),  # UTC+05:00
-            ('Karachi', 'Asia/Karachi'),  # UTC+05:00
-            ('Tashkent', 'Asia/Tashkent'),  # UTC+05:00
-            ('Yekaterinburg', 'Asia/Yekaterinburg'),  # UTC+05:00
-            ('Maldives', 'Indian/Maldives'),  # UTC+05:00
-            ('Chagos', 'Indian/Chagos'),  # UTC+05:00
-            ('Chennai', 'Asia/Kolkata'),  # UTC+05:30
-            ('Kolkata', 'Asia/Kolkata'),  # UTC+05:30
-            ('Mumbai', 'Asia/Kolkata'),  # UTC+05:30
-            ('New Delhi', 'Asia/Kolkata'),  # UTC+05:30
-            ('Sri Jayawardenepura', 'Asia/Colombo'),  # UTC+05:30
-            ('Kathmandu', 'Asia/Kathmandu'),  # UTC+05:45
-            ('Dhaka', 'Asia/Dhaka'),  # UTC+06:00
-            ('Almaty', 'Asia/Almaty'),  # UTC+06:00
-            ('Bishkek', 'Asia/Bishkek'),  # UTC+06:00
-            ('Thimphu', 'Asia/Thimphu'),  # UTC+06:00
-            ('Yangon (Rangoon)', 'Asia/Yangon'),  # UTC+06:30
-            ('Cocos Islands', 'Indian/Cocos'),  # UTC+06:30
-            ('Bangkok', 'Asia/Bangkok'),  # UTC+07:00
-            ('Hanoi', 'Asia/Ho_Chi_Minh'),  # UTC+07:00
-            ('Jakarta', 'Asia/Jakarta'),  # UTC+07:00
-            ('Novosibirsk', 'Asia/Novosibirsk'),  # UTC+07:00
-            ('Krasnoyarsk', 'Asia/Krasnoyarsk'),  # UTC+07:00
-            ('Beijing', 'Asia/Shanghai'),  # UTC+08:00
-            ('Singapore', 'Asia/Singapore'),  # UTC+08:00
-            ('Perth', 'Australia/Perth'),  # UTC+08:00
-            ('Hong Kong', 'Asia/Hong_Kong'),  # UTC+08:00
-            ('Ulaanbaatar', 'Asia/Ulaanbaatar'),  # UTC+08:00
-            ('Palau', 'Pacific/Palau'),  # UTC+08:00
-            ('Eucla', 'Australia/Eucla'),  # UTC+08:45
-            ('Tokyo', 'Asia/Tokyo'),  # UTC+09:00
-            ('Seoul', 'Asia/Seoul'),  # UTC+09:00
-            ('Yakutsk', 'Asia/Yakutsk'),  # UTC+09:00
-            ('Adelaide', 'Australia/Adelaide'),  # UTC+09:30 (DST: UTC+10:30)
-            ('Darwin', 'Australia/Darwin'),  # UTC+09:30
-            ('Sydney', 'Australia/Sydney'),  # UTC+10:00 (DST: UTC+11:00)
-            ('Brisbane', 'Australia/Brisbane'),  # UTC+10:00
-            ('Guam', 'Pacific/Guam'),  # UTC+10:00
-            ('Vladivostok', 'Asia/Vladivostok'),  # UTC+10:00
-            ('Tahiti', 'Pacific/Tahiti'),  # UTC+10:00
-            ('Lord Howe Island', 'Australia/Lord_Howe'),  # UTC+10:30 (DST: UTC+11:00)
-            ('Solomon Islands', 'Pacific/Guadalcanal'),  # UTC+11:00
-            ('Magadan', 'Asia/Magadan'),  # UTC+11:00
-            ('Norfolk Island', 'Pacific/Norfolk'),  # UTC+11:00
-            ('Bougainville Island', 'Pacific/Bougainville'),  # UTC+11:00
-            ('Chokurdakh', 'Asia/Srednekolymsk'),  # UTC+11:00
-            ('Auckland', 'Pacific/Auckland'),  # UTC+12:00 (DST: UTC+13:00)
-            ('Wellington', 'Pacific/Auckland'),  # UTC+12:00 (DST: UTC+13:00)
-            ('Fiji Islands', 'Pacific/Fiji'),  # UTC+12:00 (DST: UTC+13:00)
-            ('Anadyr', 'Asia/Anadyr'),  # UTC+12:00
-            ('Chatham Islands', 'Pacific/Chatham'),  # UTC+12:45 (DST: UTC+13:45)
-            ("Nuku'alofa", 'Pacific/Tongatapu'),  # UTC+13:00
-            ('Samoa', 'Pacific/Apia'),  # UTC+13:00 (DST: UTC+14:00)
-            ('Kiritimati Island', 'Pacific/Kiritimati')  # UTC+14:00
+            ("Midway Island", "Pacific/Midway"),  # UTC-11:00
+            ("American Samoa", "Pacific/Pago_Pago"),  # UTC-11:00
+            ("Hawaii", "Pacific/Honolulu"),  # UTC-10:00
+            ("Aleutian Islands", "America/Adak"),  # UTC-10:00 (DST: UTC-09:00)
+            ("Marquesas Islands", "Pacific/Marquesas"),  # UTC-09:30
+            ("Alaska", "America/Anchorage"),  # UTC-09:00 (DST: UTC-08:00)
+            ("Gambier Islands", "Pacific/Gambier"),  # UTC-09:00
+            (
+                "Pacific Time (US and Canada)",
+                "America/Los_Angeles",
+            ),  # UTC-08:00 (DST: UTC-07:00)
+            ("Baja California", "America/Tijuana"),  # UTC-08:00 (DST: UTC-07:00)
+            (
+                "Mountain Time (US and Canada)",
+                "America/Denver",
+            ),  # UTC-07:00 (DST: UTC-06:00)
+            ("Arizona", "America/Phoenix"),  # UTC-07:00
+            ("Chihuahua, Mazatlan", "America/Chihuahua"),  # UTC-07:00 (DST: UTC-06:00)
+            (
+                "Central Time (US and Canada)",
+                "America/Chicago",
+            ),  # UTC-06:00 (DST: UTC-05:00)
+            ("Saskatchewan", "America/Regina"),  # UTC-06:00
+            (
+                "Guadalajara, Mexico City, Monterrey",
+                "America/Mexico_City",
+            ),  # UTC-06:00 (DST: UTC-05:00)
+            ("Tegucigalpa, Honduras", "America/Tegucigalpa"),  # UTC-06:00
+            ("Costa Rica", "America/Costa_Rica"),  # UTC-06:00
+            (
+                "Eastern Time (US and Canada)",
+                "America/New_York",
+            ),  # UTC-05:00 (DST: UTC-04:00)
+            ("Lima", "America/Lima"),  # UTC-05:00
+            ("Bogota", "America/Bogota"),  # UTC-05:00
+            ("Quito", "America/Guayaquil"),  # UTC-05:00
+            ("Chetumal", "America/Cancun"),  # UTC-05:00 (DST: UTC-04:00)
+            ("Caracas (Old Venezuela Time)", "America/Caracas"),  # UTC-04:30
+            ("Atlantic Time (Canada)", "America/Halifax"),  # UTC-04:00 (DST: UTC-03:00)
+            ("Caracas", "America/Caracas"),  # UTC-04:00
+            ("Santiago", "America/Santiago"),  # UTC-04:00 (DST: UTC-03:00)
+            ("La Paz", "America/La_Paz"),  # UTC-04:00
+            ("Manaus", "America/Manaus"),  # UTC-04:00
+            ("Georgetown", "America/Guyana"),  # UTC-04:00
+            ("Bermuda", "Atlantic/Bermuda"),  # UTC-04:00 (DST: UTC-03:00)
+            (
+                "Newfoundland Time (Canada)",
+                "America/St_Johns",
+            ),  # UTC-03:30 (DST: UTC-02:30)
+            ("Buenos Aires", "America/Argentina/Buenos_Aires"),  # UTC-03:00
+            ("Brasilia", "America/Sao_Paulo"),  # UTC-03:00
+            ("Greenland", "America/Godthab"),  # UTC-03:00 (DST: UTC-02:00)
+            ("Montevideo", "America/Montevideo"),  # UTC-03:00
+            ("Falkland Islands", "Atlantic/Stanley"),  # UTC-03:00
+            (
+                "South Georgia and the South Sandwich Islands",
+                "Atlantic/South_Georgia",
+            ),  # UTC-02:00
+            ("Azores", "Atlantic/Azores"),  # UTC-01:00 (DST: UTC+00:00)
+            ("Cape Verde Islands", "Atlantic/Cape_Verde"),  # UTC-01:00
+            ("Dublin", "Europe/Dublin"),  # UTC+00:00 (DST: UTC+01:00)
+            ("Reykjavik", "Atlantic/Reykjavik"),  # UTC+00:00
+            ("Lisbon", "Europe/Lisbon"),  # UTC+00:00 (DST: UTC+01:00)
+            ("Monrovia", "Africa/Monrovia"),  # UTC+00:00
+            ("Casablanca", "Africa/Casablanca"),  # UTC+00:00 (DST: UTC+01:00)
+            (
+                "Central European Time (Berlin, Rome, Paris)",
+                "Europe/Paris",
+            ),  # UTC+01:00 (DST: UTC+02:00)
+            ("West Central Africa", "Africa/Lagos"),  # UTC+01:00
+            ("Algiers", "Africa/Algiers"),  # UTC+01:00
+            ("Lagos", "Africa/Lagos"),  # UTC+01:00
+            ("Tunis", "Africa/Tunis"),  # UTC+01:00
+            (
+                "Eastern European Time (Cairo, Helsinki, Kyiv)",
+                "Europe/Kiev",
+            ),  # UTC+02:00 (DST: UTC+03:00)
+            ("Athens", "Europe/Athens"),  # UTC+02:00 (DST: UTC+03:00)
+            ("Jerusalem", "Asia/Jerusalem"),  # UTC+02:00 (DST: UTC+03:00)
+            ("Johannesburg", "Africa/Johannesburg"),  # UTC+02:00
+            ("Harare, Pretoria", "Africa/Harare"),  # UTC+02:00
+            ("Moscow Time", "Europe/Moscow"),  # UTC+03:00
+            ("Baghdad", "Asia/Baghdad"),  # UTC+03:00
+            ("Nairobi", "Africa/Nairobi"),  # UTC+03:00
+            ("Kuwait, Riyadh", "Asia/Riyadh"),  # UTC+03:00
+            ("Tehran", "Asia/Tehran"),  # UTC+03:30 (DST: UTC+04:30)
+            ("Abu Dhabi", "Asia/Dubai"),  # UTC+04:00
+            ("Baku", "Asia/Baku"),  # UTC+04:00 (DST: UTC+05:00)
+            ("Yerevan", "Asia/Yerevan"),  # UTC+04:00 (DST: UTC+05:00)
+            ("Astrakhan", "Europe/Astrakhan"),  # UTC+04:00
+            ("Tbilisi", "Asia/Tbilisi"),  # UTC+04:00
+            ("Mauritius", "Indian/Mauritius"),  # UTC+04:00
+            ("Islamabad", "Asia/Karachi"),  # UTC+05:00
+            ("Karachi", "Asia/Karachi"),  # UTC+05:00
+            ("Tashkent", "Asia/Tashkent"),  # UTC+05:00
+            ("Yekaterinburg", "Asia/Yekaterinburg"),  # UTC+05:00
+            ("Maldives", "Indian/Maldives"),  # UTC+05:00
+            ("Chagos", "Indian/Chagos"),  # UTC+05:00
+            ("Chennai", "Asia/Kolkata"),  # UTC+05:30
+            ("Kolkata", "Asia/Kolkata"),  # UTC+05:30
+            ("Mumbai", "Asia/Kolkata"),  # UTC+05:30
+            ("New Delhi", "Asia/Kolkata"),  # UTC+05:30
+            ("Sri Jayawardenepura", "Asia/Colombo"),  # UTC+05:30
+            ("Kathmandu", "Asia/Kathmandu"),  # UTC+05:45
+            ("Dhaka", "Asia/Dhaka"),  # UTC+06:00
+            ("Almaty", "Asia/Almaty"),  # UTC+06:00
+            ("Bishkek", "Asia/Bishkek"),  # UTC+06:00
+            ("Thimphu", "Asia/Thimphu"),  # UTC+06:00
+            ("Yangon (Rangoon)", "Asia/Yangon"),  # UTC+06:30
+            ("Cocos Islands", "Indian/Cocos"),  # UTC+06:30
+            ("Bangkok", "Asia/Bangkok"),  # UTC+07:00
+            ("Hanoi", "Asia/Ho_Chi_Minh"),  # UTC+07:00
+            ("Jakarta", "Asia/Jakarta"),  # UTC+07:00
+            ("Novosibirsk", "Asia/Novosibirsk"),  # UTC+07:00
+            ("Krasnoyarsk", "Asia/Krasnoyarsk"),  # UTC+07:00
+            ("Beijing", "Asia/Shanghai"),  # UTC+08:00
+            ("Singapore", "Asia/Singapore"),  # UTC+08:00
+            ("Perth", "Australia/Perth"),  # UTC+08:00
+            ("Hong Kong", "Asia/Hong_Kong"),  # UTC+08:00
+            ("Ulaanbaatar", "Asia/Ulaanbaatar"),  # UTC+08:00
+            ("Palau", "Pacific/Palau"),  # UTC+08:00
+            ("Eucla", "Australia/Eucla"),  # UTC+08:45
+            ("Tokyo", "Asia/Tokyo"),  # UTC+09:00
+            ("Seoul", "Asia/Seoul"),  # UTC+09:00
+            ("Yakutsk", "Asia/Yakutsk"),  # UTC+09:00
+            ("Adelaide", "Australia/Adelaide"),  # UTC+09:30 (DST: UTC+10:30)
+            ("Darwin", "Australia/Darwin"),  # UTC+09:30
+            ("Sydney", "Australia/Sydney"),  # UTC+10:00 (DST: UTC+11:00)
+            ("Brisbane", "Australia/Brisbane"),  # UTC+10:00
+            ("Guam", "Pacific/Guam"),  # UTC+10:00
+            ("Vladivostok", "Asia/Vladivostok"),  # UTC+10:00
+            ("Tahiti", "Pacific/Tahiti"),  # UTC+10:00
+            ("Lord Howe Island", "Australia/Lord_Howe"),  # UTC+10:30 (DST: UTC+11:00)
+            ("Solomon Islands", "Pacific/Guadalcanal"),  # UTC+11:00
+            ("Magadan", "Asia/Magadan"),  # UTC+11:00
+            ("Norfolk Island", "Pacific/Norfolk"),  # UTC+11:00
+            ("Bougainville Island", "Pacific/Bougainville"),  # UTC+11:00
+            ("Chokurdakh", "Asia/Srednekolymsk"),  # UTC+11:00
+            ("Auckland", "Pacific/Auckland"),  # UTC+12:00 (DST: UTC+13:00)
+            ("Wellington", "Pacific/Auckland"),  # UTC+12:00 (DST: UTC+13:00)
+            ("Fiji Islands", "Pacific/Fiji"),  # UTC+12:00 (DST: UTC+13:00)
+            ("Anadyr", "Asia/Anadyr"),  # UTC+12:00
+            ("Chatham Islands", "Pacific/Chatham"),  # UTC+12:45 (DST: UTC+13:45)
+            ("Nuku'alofa", "Pacific/Tongatapu"),  # UTC+13:00
+            ("Samoa", "Pacific/Apia"),  # UTC+13:00 (DST: UTC+14:00)
+            ("Kiritimati Island", "Pacific/Kiritimati"),  # UTC+14:00
        ]

        timezone_list = []
@@ -150,7 +177,6 @@ class TimezoneEndpoint(APIView):

        # Process timezone mapping
        for friendly_name, tz_identifier in timezone_locations:
-
            try:
                tz = pytz.timezone(tz_identifier)
                current_offset = now.astimezone(tz).strftime("%z")

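The UTC comments on each entry are only documentation; the endpoint computes the live offset with pytz at request time, so DST shifts are picked up automatically. A small sketch of the same call (pytz assumed installed; the zone is an example):

import pytz
from django.utils import timezone

tz = pytz.timezone("America/New_York")
offset = timezone.now().astimezone(tz).strftime("%z")
print(offset)  # "-0400" during DST, "-0500" otherwise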
@@ -3,6 +3,7 @@ import csv
import io
import os
from datetime import date
+import uuid

from dateutil.relativedelta import relativedelta
from django.db import IntegrityError
@@ -35,6 +36,7 @@ from plane.db.models import (
    Workspace,
    WorkspaceMember,
    WorkspaceTheme,
+    Profile,
)
from plane.app.permissions import ROLE, allow_permission
from django.utils.decorators import method_decorator
@@ -43,6 +45,7 @@ from django.views.decorators.vary import vary_on_cookie
from plane.utils.constants import RESTRICTED_WORKSPACE_SLUGS
from plane.license.utils.instance_value import get_configuration_value
from plane.bgtasks.workspace_seed_task import workspace_seed
+from plane.utils.url import contains_url


class WorkSpaceViewSet(BaseViewSet):
@@ -109,6 +112,12 @@ class WorkSpaceViewSet(BaseViewSet):
                status=status.HTTP_400_BAD_REQUEST,
            )

+        if contains_url(name):
+            return Response(
+                {"error": "Name cannot contain a URL"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
        if serializer.is_valid(raise_exception=True):
            serializer.save(owner=request.user)
            # Create Workspace member
@@ -150,8 +159,18 @@ class WorkSpaceViewSet(BaseViewSet):
    def partial_update(self, request, *args, **kwargs):
        return super().partial_update(request, *args, **kwargs)

+    def remove_last_workspace_ids_from_user_settings(self, id: uuid.UUID) -> None:
+        """
+        Remove the last workspace id from the user settings
+        """
+        Profile.objects.filter(last_workspace_id=id).update(last_workspace_id=None)
+        return
+
    @allow_permission([ROLE.ADMIN], level="WORKSPACE")
    def destroy(self, request, *args, **kwargs):
        # Get the workspace
        workspace = self.get_object()
+        self.remove_last_workspace_ids_from_user_settings(workspace.id)
        return super().destroy(request, *args, **kwargs)


@@ -159,8 +178,6 @@ class UserWorkSpacesEndpoint(BaseAPIView):
    search_fields = ["name"]
    filterset_fields = ["owner"]

-    @method_decorator(cache_control(private=True, max_age=12))
-    @method_decorator(vary_on_cookie)
    def get(self, request):
        fields = [field for field in request.GET.get("fields", "").split(",") if field]
        member_count = (

@@ -12,6 +12,7 @@ from plane.app.permissions import WorkspaceViewerPermission
from plane.app.serializers.cycle import CycleSerializer
from plane.utils.timezone_converter import user_timezone_converter


class WorkspaceCyclesEndpoint(BaseAPIView):
    permission_classes = [WorkspaceViewerPermission]

@@ -38,6 +38,7 @@ from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.issue_filters import issue_filters
+from plane.utils.host import base_host


class WorkspaceDraftIssueViewSet(BaseViewSet):
    model = DraftIssue

@@ -1,5 +1,6 @@
# Django imports
from django.db.models import Count, Q, OuterRef, Subquery, IntegerField
+from django.utils import timezone
from django.db.models.functions import Coalesce

# Third party modules
@@ -133,7 +134,7 @@ class WorkSpaceMemberViewSet(BaseViewSet):
        # Deactivate the users from the projects where the user is part of
        _ = ProjectMember.objects.filter(
            workspace__slug=slug, member_id=workspace_member.member_id, is_active=True
-        ).update(is_active=False)
+        ).update(is_active=False, updated_at=timezone.now())

        workspace_member.is_active = False
        workspace_member.save()
@@ -194,7 +195,7 @@ class WorkSpaceMemberViewSet(BaseViewSet):
        # # Deactivate the users from the projects where the user is part of
        _ = ProjectMember.objects.filter(
            workspace__slug=slug, member_id=workspace_member.member_id, is_active=True
-        ).update(is_active=False)
+        ).update(is_active=False, updated_at=timezone.now())

        # # Deactivate the user
        workspace_member.is_active = False

@@ -27,10 +27,7 @@ class WorkspaceUserPreferenceViewSet(BaseAPIView):

        create_preference_keys = []

-        keys = [
-            key
-            for key, _ in WorkspaceUserPreference.UserPreferenceKeys.choices
-        ]
+        keys = [key for key, _ in WorkspaceUserPreference.UserPreferenceKeys.choices]

        for preference in keys:
            if preference not in get_preference.values_list("key", flat=True):
@@ -39,7 +36,10 @@ class WorkspaceUserPreferenceViewSet(BaseAPIView):
        preference = WorkspaceUserPreference.objects.bulk_create(
            [
                WorkspaceUserPreference(
-                    key=key, user=request.user, workspace=workspace, sort_order=(65535 + (i*10000))
+                    key=key,
+                    user=request.user,
+                    workspace=workspace,
+                    sort_order=(65535 + (i * 10000)),
                )
                for i, key in enumerate(create_preference_keys)
            ],
@@ -47,10 +47,13 @@ class WorkspaceUserPreferenceViewSet(BaseAPIView):
            ignore_conflicts=True,
        )

-        preferences = WorkspaceUserPreference.objects.filter(
-            user=request.user, workspace_id=workspace.id
-        ).order_by("sort_order").values("key", "is_pinned", "sort_order")
-
+        preferences = (
+            WorkspaceUserPreference.objects.filter(
+                user=request.user, workspace_id=workspace.id
+            )
+            .order_by("sort_order")
+            .values("key", "is_pinned", "sort_order")
+        )

        user_preferences = {}

@@ -58,7 +61,7 @@ class WorkspaceUserPreferenceViewSet(BaseAPIView):
            user_preferences[(str(preference["key"]))] = {
                "is_pinned": preference["is_pinned"],
                "sort_order": preference["sort_order"],
            }
        return Response(
            user_preferences,
            status=status.HTTP_200_OK,

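The bulk-create above seeds each preference with a widely spaced sort_order so rows can later be reordered by inserting values between existing ones. A sketch of the resulting numbers; the key names here are hypothetical:

create_preference_keys = ["home", "your_work", "drafts"]
orders = [65535 + (i * 10000) for i, _ in enumerate(create_preference_keys)]
print(orders)  # [65535, 75535, 85535]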
@@ -18,6 +18,7 @@ from plane.bgtasks.user_activation_email_task import user_activation_email
from plane.utils.host import base_host
from plane.utils.ip_address import get_client_ip


class Adapter:
    """Common interface for all auth providers"""

@@ -41,7 +41,6 @@ AUTHENTICATION_ERROR_CODES = {
    "GOOGLE_OAUTH_PROVIDER_ERROR": 5115,
    "GITHUB_OAUTH_PROVIDER_ERROR": 5120,
    "GITLAB_OAUTH_PROVIDER_ERROR": 5121,
-
    # Reset Password
    "INVALID_PASSWORD_TOKEN": 5125,
    "EXPIRED_PASSWORD_TOKEN": 5130,

@@ -25,23 +25,24 @@ class GitHubOAuthProvider(OauthAdapter):

    organization_scope = "read:org"


    def __init__(self, request, code=None, state=None, callback=None):
-        GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET, GITHUB_ORGANIZATION_ID = get_configuration_value(
-            [
-                {
-                    "key": "GITHUB_CLIENT_ID",
-                    "default": os.environ.get("GITHUB_CLIENT_ID"),
-                },
-                {
-                    "key": "GITHUB_CLIENT_SECRET",
-                    "default": os.environ.get("GITHUB_CLIENT_SECRET"),
-                },
-                {
-                    "key": "GITHUB_ORGANIZATION_ID",
-                    "default": os.environ.get("GITHUB_ORGANIZATION_ID"),
-                },
-            ]
+        GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET, GITHUB_ORGANIZATION_ID = (
+            get_configuration_value(
+                [
+                    {
+                        "key": "GITHUB_CLIENT_ID",
+                        "default": os.environ.get("GITHUB_CLIENT_ID"),
+                    },
+                    {
+                        "key": "GITHUB_CLIENT_SECRET",
+                        "default": os.environ.get("GITHUB_CLIENT_SECRET"),
+                    },
+                    {
+                        "key": "GITHUB_ORGANIZATION_ID",
+                        "default": os.environ.get("GITHUB_ORGANIZATION_ID"),
+                    },
+                ]
+            )
        )

        if not (GITHUB_CLIENT_ID and GITHUB_CLIENT_SECRET):
@@ -128,7 +129,10 @@ class GitHubOAuthProvider(OauthAdapter):

    def is_user_in_organization(self, github_username):
        headers = {"Authorization": f"Bearer {self.token_data.get('access_token')}"}
-        response = requests.get(f"{self.org_membership_url}/{self.organization_id}/memberships/{github_username}", headers=headers)
+        response = requests.get(
+            f"{self.org_membership_url}/{self.organization_id}/memberships/{github_username}",
+            headers=headers,
+        )
        return response.status_code == 200  # 200 means the user is a member

    def set_user_data(self):
@@ -145,7 +149,6 @@ class GitHubOAuthProvider(OauthAdapter):
            error_message="GITHUB_USER_NOT_IN_ORG",
        )
-

        email = self.__get_email(headers=headers)
        super().set_user_data(
            {

@@ -42,11 +42,11 @@ urlpatterns = [
    # credentials
    path("sign-in/", SignInAuthEndpoint.as_view(), name="sign-in"),
    path("sign-up/", SignUpAuthEndpoint.as_view(), name="sign-up"),
-    path("spaces/sign-in/", SignInAuthSpaceEndpoint.as_view(), name="sign-in"),
-    path("spaces/sign-up/", SignUpAuthSpaceEndpoint.as_view(), name="sign-in"),
+    path("spaces/sign-in/", SignInAuthSpaceEndpoint.as_view(), name="space-sign-in"),
+    path("spaces/sign-up/", SignUpAuthSpaceEndpoint.as_view(), name="space-sign-up"),
    # signout
    path("sign-out/", SignOutAuthEndpoint.as_view(), name="sign-out"),
-    path("spaces/sign-out/", SignOutAuthSpaceEndpoint.as_view(), name="sign-out"),
+    path("spaces/sign-out/", SignOutAuthSpaceEndpoint.as_view(), name="space-sign-out"),
    # csrf token
    path("get-csrf-token/", CSRFTokenEndpoint.as_view(), name="get_csrf_token"),
    # Magic sign in
@@ -56,17 +56,17 @@ urlpatterns = [
    path(
        "spaces/magic-generate/",
        MagicGenerateSpaceEndpoint.as_view(),
-        name="magic-generate",
+        name="space-magic-generate",
    ),
    path(
        "spaces/magic-sign-in/",
        MagicSignInSpaceEndpoint.as_view(),
-        name="magic-sign-in",
+        name="space-magic-sign-in",
    ),
    path(
        "spaces/magic-sign-up/",
        MagicSignUpSpaceEndpoint.as_view(),
-        name="magic-sign-up",
+        name="space-magic-sign-up",
    ),
    ## Google Oauth
    path("google/", GoogleOauthInitiateEndpoint.as_view(), name="google-initiate"),
@@ -74,12 +74,12 @@ urlpatterns = [
    path(
        "spaces/google/",
        GoogleOauthInitiateSpaceEndpoint.as_view(),
-        name="google-initiate",
+        name="space-google-initiate",
    ),
    path(
-        "google/callback/",
+        "spaces/google/callback/",
        GoogleCallbackSpaceEndpoint.as_view(),
-        name="google-callback",
+        name="space-google-callback",
    ),
    ## Github Oauth
    path("github/", GitHubOauthInitiateEndpoint.as_view(), name="github-initiate"),
@@ -87,12 +87,12 @@ urlpatterns = [
    path(
        "spaces/github/",
        GitHubOauthInitiateSpaceEndpoint.as_view(),
-        name="github-initiate",
+        name="space-github-initiate",
    ),
    path(
        "spaces/github/callback/",
        GitHubCallbackSpaceEndpoint.as_view(),
-        name="github-callback",
+        name="space-github-callback",
    ),
    ## Gitlab Oauth
    path("gitlab/", GitLabOauthInitiateEndpoint.as_view(), name="gitlab-initiate"),
@@ -100,12 +100,12 @@ urlpatterns = [
    path(
        "spaces/gitlab/",
        GitLabOauthInitiateSpaceEndpoint.as_view(),
-        name="gitlab-initiate",
+        name="space-gitlab-initiate",
    ),
    path(
        "spaces/gitlab/callback/",
        GitLabCallbackSpaceEndpoint.as_view(),
-        name="gitlab-callback",
+        name="space-gitlab-callback",
    ),
    # Email Check
    path("email-check/", EmailCheckEndpoint.as_view(), name="email-check"),
@@ -120,12 +120,12 @@ urlpatterns = [
    path(
        "spaces/forgot-password/",
        ForgotPasswordSpaceEndpoint.as_view(),
-        name="forgot-password",
+        name="space-forgot-password",
    ),
    path(
        "spaces/reset-password/<uidb64>/<token>/",
        ResetPasswordSpaceEndpoint.as_view(),
-        name="forgot-password",
+        name="space-forgot-password",
    ),
    path("change-password/", ChangePasswordEndpoint.as_view(), name="forgot-password"),
    path("set-password/", SetUserPasswordEndpoint.as_view(), name="set-password"),

@@ -1,20 +1,29 @@
# Django imports
from django.conf import settings
from django.http import HttpRequest

# Third party imports
from rest_framework.request import Request

# Module imports
from plane.utils.ip_address import get_client_ip

-def base_host(request: Request | HttpRequest, is_admin: bool = False, is_space: bool = False, is_app: bool = False) -> str:

+def base_host(
+    request: Request | HttpRequest,
+    is_admin: bool = False,
+    is_space: bool = False,
+    is_app: bool = False,
+) -> str:
    """Utility function to return host / origin from the request"""
    # Calculate the base origin from request
    base_origin = settings.WEB_URL or settings.APP_BASE_URL

-    # Admin redirections
+    # Admin redirection
    if is_admin:
-        admin_base_path = getattr(settings, "ADMIN_BASE_PATH", "/god-mode/")
+        admin_base_path = getattr(settings, "ADMIN_BASE_PATH", None)
+        if not isinstance(admin_base_path, str):
+            admin_base_path = "/god-mode/"
        if not admin_base_path.startswith("/"):
            admin_base_path = "/" + admin_base_path
        if not admin_base_path.endswith("/"):
@@ -25,9 +34,11 @@ def base_host(request: Request | HttpRequest, is_admin: bool = False, is_space:
        else:
            return base_origin + admin_base_path

-    # Space redirections
+    # Space redirection
    if is_space:
-        space_base_path = getattr(settings, "SPACE_BASE_PATH", "/spaces/")
+        space_base_path = getattr(settings, "SPACE_BASE_PATH", None)
+        if not isinstance(space_base_path, str):
+            space_base_path = "/spaces/"
        if not space_base_path.startswith("/"):
            space_base_path = "/" + space_base_path
        if not space_base_path.endswith("/"):

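A distilled sketch of the path normalization applied above to ADMIN_BASE_PATH and SPACE_BASE_PATH; this helper is illustrative, not part of the module:

def normalize_base_path(value, fallback):
    # Fall back when the setting is unset or not a string, then force
    # leading and trailing slashes, matching the checks in base_host.
    path = value if isinstance(value, str) else fallback
    if not path.startswith("/"):
        path = "/" + path
    if not path.endswith("/"):
        path = path + "/"
    return path

assert normalize_base_path(None, "/god-mode/") == "/god-mode/"
assert normalize_base_path("spaces", "/spaces/") == "/spaces/"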
@@ -6,6 +6,7 @@ from django.conf import settings
from plane.utils.host import base_host
from plane.utils.ip_address import get_client_ip


def user_login(request, user, is_app=False, is_admin=False, is_space=False):
    login(request=request, user=user)

@@ -21,6 +21,7 @@ from plane.authentication.adapter.error import (
)
+from plane.utils.path_validator import validate_next_path


class SignInAuthEndpoint(View):
    def post(self, request):
        next_path = request.POST.get("next_path")
@@ -18,6 +18,7 @@ from plane.authentication.adapter.error import (
)
+from plane.utils.path_validator import validate_next_path


class GitHubOauthInitiateEndpoint(View):
    def get(self, request):
        # Get host and next path
@@ -18,6 +18,7 @@ from plane.authentication.adapter.error import (
)
+from plane.utils.path_validator import validate_next_path


class GitLabOauthInitiateEndpoint(View):
    def get(self, request):
        # Get host and next path
@@ -20,6 +20,7 @@ from plane.authentication.adapter.error import (
)
+from plane.utils.path_validator import validate_next_path


class GoogleOauthInitiateEndpoint(View):
    def get(self, request):
        request.session["host"] = base_host(request=request, is_app=True)
@@ -95,7 +96,9 @@ class GoogleCallbackEndpoint(View):
            # Get the redirection path
            path = get_redirection_path(user=user)
            # redirect to referer path
-            url = urljoin(base_host, str(validate_next_path(next_path)) if next_path else path)
+            url = urljoin(
+                base_host, str(validate_next_path(next_path)) if next_path else path
+            )
            return HttpResponseRedirect(url)
        except AuthenticationException as e:
            params = e.get_error_dict()

@@ -53,12 +53,14 @@ class ChangePasswordEndpoint(APIView):
                error_message="MISSING_PASSWORD",
                payload={"error": "Old password is missing"},
            )
-            return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)
+            return Response(
+                exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST
+            )

        # Get the new password
        new_password = request.data.get("new_password", False)

        if not new_password:
            exc = AuthenticationException(
                error_code=AUTHENTICATION_ERROR_CODES["MISSING_PASSWORD"],
                error_message="MISSING_PASSWORD",
@@ -66,7 +68,6 @@ class ChangePasswordEndpoint(APIView):
            )
            return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)
-
        # If the user password is not autoset then we need to check the old passwords
        if not user.is_password_autoset and not user.check_password(old_password):
            exc = AuthenticationException(

@@ -25,6 +25,7 @@ from plane.authentication.adapter.error import (
)
+from plane.utils.path_validator import validate_next_path


class MagicGenerateSpaceEndpoint(APIView):
    permission_classes = [AllowAny]

@@ -38,7 +39,6 @@ class MagicGenerateSpaceEndpoint(APIView):
            )
            return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)
-
        email = request.data.get("email", "").strip().lower()
        try:
            validate_email(email)

@@ -459,8 +459,37 @@ def analytic_export_task(email, data, slug):

         csv_buffer = generate_csv_from_rows(rows)
         send_export_email(email, slug, csv_buffer, rows)
-        logging.getLogger("plane").info("Email sent succesfully.")
+        logging.getLogger("plane.worker").info("Email sent successfully.")
         return
     except Exception as e:
         log_exception(e)
         return


+@shared_task
+def export_analytics_to_csv_email(data, headers, keys, email, slug):
+    try:
+        """
+        Prepares a CSV from data and sends it as an email attachment.
+
+        Parameters:
+        - data: List of dictionaries (e.g. from .values())
+        - headers: List of CSV column headers
+        - keys: Keys to extract from each data item (dict)
+        - email: Email address to send to
+        - slug: Used for the filename
+        """
+        # Prepare rows: header + data rows
+        rows = [headers]
+        for item in data:
+            row = [item.get(key, "") for key in keys]
+            rows.append(row)
+
+        # Generate CSV buffer
+        csv_buffer = generate_csv_from_rows(rows)
+
+        # Send email with CSV attachment
+        send_export_email(email=email, slug=slug, csv_buffer=csv_buffer, rows=rows)
+    except Exception as e:
+        log_exception(e)
+        return
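A sketch of how the new task might be invoked (the caller is not part of this diff; payload values are illustrative):

export_analytics_to_csv_email.delay(
    data=[{"state": "Done", "count": 4}, {"state": "Open", "count": 9}],
    headers=["State", "Count"],
    keys=["state", "count"],
    email="member@example.com",
    slug="my-workspace",
)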
@@ -12,6 +12,7 @@ from plane.db.models import FileAsset, Page, Issue
 from plane.utils.exception_logger import log_exception
 from plane.settings.storage import S3Storage
 from celery import shared_task
+from plane.utils.url import normalize_url_path


 def get_entity_id_field(entity_type, entity_id):

@@ -67,11 +68,14 @@ def sync_with_external_service(entity_name, description_html):
             "description_html": description_html,
             "variant": "rich" if entity_name == "PAGE" else "document",
         }
-        response = requests.post(
-            f"{settings.LIVE_BASE_URL}/convert-document/",
-            json=data,
-            headers=None,
-        )
+
+        live_url = settings.LIVE_URL
+        if not live_url:
+            return {}
+
+        url = normalize_url_path(f"{live_url}/convert-document/")
+
+        response = requests.post(url, json=data, headers=None)
         if response.status_code == 200:
             return response.json()
     except requests.RequestException as e:
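normalize_url_path itself is not shown in this diff; a plausible sketch of such a helper, which collapses duplicate slashes so a trailing slash in LIVE_URL cannot produce a //convert-document/ path:

import re
from urllib.parse import urlsplit, urlunsplit

def normalize_url_path_sketch(url: str) -> str:
    # Collapse runs of slashes in the path, leaving scheme and host intact
    scheme, netloc, path, query, fragment = urlsplit(url)
    return urlunsplit((scheme, netloc, re.sub(r"/{2,}", "/", path), query, fragment))

normalize_url_path_sketch("https://live.example.com/live//convert-document/")
# -> 'https://live.example.com/live/convert-document/'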
@@ -33,6 +33,7 @@ from plane.db.models import (
     Intake,
     IntakeIssue,
 )
+from plane.db.models.intake import SourceType


 def create_project(workspace, user_id):

@@ -388,7 +389,7 @@ def create_intake_issues(workspace, project, user_id, intake_issue_count):
                 if status == 0
                 else None
             ),
-            source="in-app",
+            source=SourceType.IN_APP,
             workspace=workspace,
             project=project,
         )
@@ -284,6 +284,7 @@ def send_email_notification(
             "project": str(issue.project.name),
             "user_preference": f"{base_api}/profile/preferences/email",
             "comments": comments,
+            "entity_type": "issue",
         }
         html_content = render_to_string(
             "emails/notifications/issue-updates.html", context

@@ -309,7 +310,7 @@ def send_email_notification(
             )
             msg.attach_alternative(html_content, "text/html")
             msg.send()
-            logging.getLogger("plane").info("Email Sent Successfully")
+            logging.getLogger("plane.worker").info("Email Sent Successfully")

             # Update the logs
             EmailNotificationLog.objects.filter(

@@ -325,7 +326,7 @@ def send_email_notification(
             release_lock(lock_id=lock_id)
             return
         else:
-            logging.getLogger("plane").info("Duplicate email received skipping")
+            logging.getLogger("plane.worker").info("Duplicate email received skipping")
             return
     except (Issue.DoesNotExist, User.DoesNotExist):
         release_lock(lock_id=lock_id)
@@ -3,34 +3,49 @@ import csv
 import io
 import json
 import zipfile

+from typing import List
 import boto3
 from botocore.client import Config
+from uuid import UUID
+from datetime import datetime, date

 # Third party imports
 from celery import shared_task


 # Django imports
 from django.conf import settings
 from django.utils import timezone
 from openpyxl import Workbook
+from django.db.models import F, Prefetch
+
+from collections import defaultdict

 # Module imports
-from plane.db.models import ExporterHistory, Issue
+from plane.db.models import ExporterHistory, Issue, FileAsset, Label, User, IssueComment
 from plane.utils.exception_logger import log_exception


-def dateTimeConverter(time):
+def dateTimeConverter(time: datetime) -> str | None:
+    """
+    Convert a datetime object to a formatted string.
+    """
     if time:
         return time.strftime("%a, %d %b %Y %I:%M:%S %Z%z")


-def dateConverter(time):
+def dateConverter(time: date) -> str | None:
+    """
+    Convert a date object to a formatted string.
+    """
     if time:
         return time.strftime("%a, %d %b %Y")


-def create_csv_file(data):
+def create_csv_file(data: List[List[str]]) -> str:
+    """
+    Create a CSV file from the provided data.
+    """
     csv_buffer = io.StringIO()
     csv_writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)
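Output shape of the two converters above (values illustrative):

from datetime import datetime, timezone

dateTimeConverter(datetime(2025, 5, 9, 11, 31, tzinfo=timezone.utc))
# -> 'Fri, 09 May 2025 11:31:00 UTC+0000'
dateConverter(datetime(2025, 5, 9).date())
# -> 'Fri, 09 May 2025'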
@@ -41,11 +56,17 @@ def create_csv_file(data):
     return csv_buffer.getvalue()


-def create_json_file(data):
+def create_json_file(data: List[dict]) -> str:
+    """
+    Create a JSON file from the provided data.
+    """
     return json.dumps(data)


-def create_xlsx_file(data):
+def create_xlsx_file(data: List[List[str]]) -> bytes:
+    """
+    Create an XLSX file from the provided data.
+    """
     workbook = Workbook()
     sheet = workbook.active

@@ -58,7 +79,10 @@ def create_xlsx_file(data):
     return xlsx_buffer.getvalue()


-def create_zip_file(files):
+def create_zip_file(files: List[tuple[str, str | bytes]]) -> io.BytesIO:
+    """
+    Create a ZIP file from the provided files.
+    """
     zip_buffer = io.BytesIO()
     with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zipf:
         for filename, file_content in files:

@@ -68,7 +92,13 @@ def create_zip_file(files):
     return zip_buffer


-def upload_to_s3(zip_file, workspace_id, token_id, slug):
+# TODO: Change the upload_to_s3 function to use the new storage method with entry in file asset table
+def upload_to_s3(
+    zip_file: io.BytesIO, workspace_id: UUID, token_id: str, slug: str
+) -> None:
+    """
+    Upload a ZIP file to S3 and generate a presigned URL.
+    """
     file_name = (
         f"{workspace_id}/export-{slug}-{token_id[:6]}-{str(timezone.now().date())}.zip"
     )
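The body of upload_to_s3 is elided in this hunk. A sketch of the usual boto3 flow its docstring describes (bucket name and expiry are assumptions, not taken from this diff):

s3 = boto3.client("s3", config=Config(signature_version="s3v4"))
s3.upload_fileobj(zip_file, "plane-exports", file_name)
presigned_url = s3.generate_presigned_url(
    "get_object",
    Params={"Bucket": "plane-exports", "Key": file_name},
    ExpiresIn=7 * 24 * 3600,  # one week
)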
@@ -150,75 +180,85 @@ def upload_to_s3(zip_file, workspace_id, token_id, slug):
     exporter_instance.save(update_fields=["status", "url", "key"])


-def generate_table_row(issue):
+def generate_table_row(issue: dict) -> List[str]:
+    """
+    Generate a table row from an issue dictionary.
+    """
     return [
-        f"""{issue["project__identifier"]}-{issue["sequence_id"]}""",
-        issue["project__name"],
+        f"""{issue["project_identifier"]}-{issue["sequence_id"]}""",
+        issue["project_name"],
         issue["name"],
-        issue["description_stripped"],
-        issue["state__name"],
+        issue["description"],
+        issue["state_name"],
         dateConverter(issue["start_date"]),
         dateConverter(issue["target_date"]),
         issue["priority"],
-        (
-            f"{issue['created_by__first_name']} {issue['created_by__last_name']}"
-            if issue["created_by__first_name"] and issue["created_by__last_name"]
-            else ""
-        ),
-        (
-            f"{issue['assignees__first_name']} {issue['assignees__last_name']}"
-            if issue["assignees__first_name"] and issue["assignees__last_name"]
-            else ""
-        ),
-        issue["labels__name"] if issue["labels__name"] else "",
-        issue["issue_cycle__cycle__name"],
-        dateConverter(issue["issue_cycle__cycle__start_date"]),
-        dateConverter(issue["issue_cycle__cycle__end_date"]),
-        issue["issue_module__module__name"],
-        dateConverter(issue["issue_module__module__start_date"]),
-        dateConverter(issue["issue_module__module__target_date"]),
+        issue["created_by"],
+        ", ".join(issue["labels"]) if issue["labels"] else "",
+        issue["cycle_name"],
+        issue["cycle_start_date"],
+        issue["cycle_end_date"],
+        ", ".join(issue.get("module_name", "")) if issue.get("module_name") else "",
         dateTimeConverter(issue["created_at"]),
         dateTimeConverter(issue["updated_at"]),
         dateTimeConverter(issue["completed_at"]),
         dateTimeConverter(issue["archived_at"]),
+        (
+            ", ".join(
+                [
+                    f"{comment['comment']} ({comment['created_at']} by {comment['created_by']})"
+                    for comment in issue["comments"]
+                ]
+            )
+            if issue["comments"]
+            else ""
+        ),
+        issue["estimate"] if issue["estimate"] else "",
+        ", ".join(issue["link"]) if issue["link"] else "",
+        ", ".join(issue["assignees"]) if issue["assignees"] else "",
+        issue["subscribers_count"] if issue["subscribers_count"] else "",
+        issue["attachment_count"] if issue["attachment_count"] else "",
+        ", ".join(issue["attachment_links"]) if issue["attachment_links"] else "",
     ]


-def generate_json_row(issue):
+def generate_json_row(issue: dict) -> dict:
+    """
+    Generate a JSON row from an issue dictionary.
+    """
     return {
-        "ID": f"""{issue["project__identifier"]}-{issue["sequence_id"]}""",
-        "Project": issue["project__name"],
+        "ID": f"""{issue["project_identifier"]}-{issue["sequence_id"]}""",
+        "Project": issue["project_name"],
         "Name": issue["name"],
-        "Description": issue["description_stripped"],
-        "State": issue["state__name"],
+        "Description": issue["description"],
+        "State": issue["state_name"],
         "Start Date": dateConverter(issue["start_date"]),
         "Target Date": dateConverter(issue["target_date"]),
         "Priority": issue["priority"],
-        "Created By": (
-            f"{issue['created_by__first_name']} {issue['created_by__last_name']}"
-            if issue["created_by__first_name"] and issue["created_by__last_name"]
-            else ""
-        ),
-        "Assignee": (
-            f"{issue['assignees__first_name']} {issue['assignees__last_name']}"
-            if issue["assignees__first_name"] and issue["assignees__last_name"]
-            else ""
-        ),
-        "Labels": issue["labels__name"] if issue["labels__name"] else "",
-        "Cycle Name": issue["issue_cycle__cycle__name"],
-        "Cycle Start Date": dateConverter(issue["issue_cycle__cycle__start_date"]),
-        "Cycle End Date": dateConverter(issue["issue_cycle__cycle__end_date"]),
-        "Module Name": issue["issue_module__module__name"],
-        "Module Start Date": dateConverter(issue["issue_module__module__start_date"]),
-        "Module Target Date": dateConverter(issue["issue_module__module__target_date"]),
+        "Created By": (f"{issue['created_by']}" if issue["created_by"] else ""),
+        "Assignee": issue["assignees"],
+        "Labels": issue["labels"],
+        "Cycle Name": issue["cycle_name"],
+        "Cycle Start Date": issue["cycle_start_date"],
+        "Cycle End Date": issue["cycle_end_date"],
+        "Module Name": issue["module_name"],
         "Created At": dateTimeConverter(issue["created_at"]),
         "Updated At": dateTimeConverter(issue["updated_at"]),
         "Completed At": dateTimeConverter(issue["completed_at"]),
         "Archived At": dateTimeConverter(issue["archived_at"]),
+        "Comments": issue["comments"],
+        "Estimate": issue["estimate"],
+        "Link": issue["link"],
+        "Subscribers Count": issue["subscribers_count"],
+        "Attachment Count": issue["attachment_count"],
+        "Attachment Links": issue["attachment_links"],
     }


-def update_json_row(rows, row):
+def update_json_row(rows: List[dict], row: dict) -> None:
+    """
+    Update the json row with the new assignee and label.
+    """
     matched_index = next(
         (
             index
@@ -247,7 +287,10 @@ def update_json_row(rows, row):
         rows.append(row)


-def update_table_row(rows, row):
+def update_table_row(rows: List[List[str]], row: List[str]) -> None:
+    """
+    Update the table row with the new assignee and label.
+    """
     matched_index = next(
         (index for index, existing_row in enumerate(rows) if existing_row[0] == row[0]),
         None,
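Both update_* helpers above dedupe rows that the old join-based .values() query emitted once per assignee or label, keyed on the first column (the issue ID). Their merge bodies are elided in this diff; a standalone sketch of the technique, with the cell-concatenation strategy assumed:

def merge_rows(rows: list[list[str]], row: list[str]) -> None:
    # Find an existing row for the same issue ID, if any
    matched_index = next(
        (i for i, existing in enumerate(rows) if existing[0] == row[0]), None
    )
    if matched_index is None:
        rows.append(row)  # first time this issue ID appears
    else:
        # Merge differing cells (e.g. a second assignee) into the existing row
        for col, value in enumerate(row):
            if value and value not in rows[matched_index][col]:
                rows[matched_index][col] = f"{rows[matched_index][col]}, {value}"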
@@ -269,7 +312,12 @@ def update_table_row(rows, row):
         rows.append(row)


-def generate_csv(header, project_id, issues, files):
+def generate_csv(
+    header: List[str],
+    project_id: str,
+    issues: List[dict],
+    files: List[tuple[str, str | bytes]],
+) -> None:
     """
     Generate CSV export for all the passed issues.
     """
@@ -281,7 +329,15 @@ def generate_csv(header, project_id, issues, files):
     files.append((f"{project_id}.csv", csv_file))


-def generate_json(header, project_id, issues, files):
+def generate_json(
+    header: List[str],
+    project_id: str,
+    issues: List[dict],
+    files: List[tuple[str, str | bytes]],
+) -> None:
     """
     Generate JSON export for all the passed issues.
     """
     rows = []
     for issue in issues:
         row = generate_json_row(issue)
@@ -290,68 +346,169 @@ def generate_json(header, project_id, issues, files):
     files.append((f"{project_id}.json", json_file))


-def generate_xlsx(header, project_id, issues, files):
+def generate_xlsx(
+    header: List[str],
+    project_id: str,
+    issues: List[dict],
+    files: List[tuple[str, str | bytes]],
+) -> None:
     """
     Generate XLSX export for all the passed issues.
     """
     rows = [header]
     for issue in issues:
         row = generate_table_row(issue)
-
         update_table_row(rows, row)
     xlsx_file = create_xlsx_file(rows)
     files.append((f"{project_id}.xlsx", xlsx_file))


+def get_created_by(obj: Issue | IssueComment) -> str:
+    """
+    Get the created by user for the given object.
+    """
+    if obj.created_by:
+        return f"{obj.created_by.first_name} {obj.created_by.last_name}"
+    return ""
+
+
 @shared_task
-def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, slug):
+def issue_export_task(
+    provider: str,
+    workspace_id: UUID,
+    project_ids: List[str],
+    token_id: str,
+    multiple: bool,
+    slug: str,
+):
+    """
+    Export issues from the workspace.
+    provider (str): The provider to export the issues to csv | json | xlsx.
+    token_id (str): The export object token id.
+    multiple (bool): Whether to export the issues to multiple files per project.
+    """
     try:
         exporter_instance = ExporterHistory.objects.get(token=token_id)
         exporter_instance.status = "processing"
         exporter_instance.save(update_fields=["status"])

         # Base query to get the issues
         workspace_issues = (
-            (
-                Issue.objects.filter(
-                    workspace__id=workspace_id,
-                    project_id__in=project_ids,
-                    project__project_projectmember__member=exporter_instance.initiated_by_id,
-                    project__project_projectmember__is_active=True,
-                    project__archived_at__isnull=True,
-                )
-                .select_related("project", "workspace", "state", "parent", "created_by")
-                .prefetch_related(
-                    "assignees", "labels", "issue_cycle__cycle", "issue_module__module"
-                )
-                .values(
-                    "id",
-                    "project__identifier",
-                    "project__name",
-                    "project__id",
-                    "sequence_id",
-                    "name",
-                    "description_stripped",
-                    "priority",
-                    "start_date",
-                    "target_date",
-                    "state__name",
-                    "created_at",
-                    "updated_at",
-                    "completed_at",
-                    "archived_at",
-                    "issue_cycle__cycle__name",
-                    "issue_cycle__cycle__start_date",
-                    "issue_cycle__cycle__end_date",
-                    "issue_module__module__name",
-                    "issue_module__module__start_date",
-                    "issue_module__module__target_date",
-                    "created_by__first_name",
-                    "created_by__last_name",
-                    "assignees__first_name",
-                    "assignees__last_name",
-                    "labels__name",
-                )
-            )
+            Issue.objects.filter(
+                workspace__id=workspace_id,
+                project_id__in=project_ids,
+                project__project_projectmember__member=exporter_instance.initiated_by_id,
+                project__project_projectmember__is_active=True,
+                project__archived_at__isnull=True,
+            )
+            .select_related(
+                "project",
+                "workspace",
+                "state",
+                "parent",
+                "created_by",
+                "estimate_point",
+            )
+            .prefetch_related(
+                "labels",
+                "issue_cycle__cycle",
+                "issue_module__module",
+                "issue_comments",
+                "assignees",
+                Prefetch(
+                    "assignees",
+                    queryset=User.objects.only("first_name", "last_name").distinct(),
+                    to_attr="assignee_details",
+                ),
+                Prefetch(
+                    "labels",
+                    queryset=Label.objects.only("name").distinct(),
+                    to_attr="label_details",
+                ),
+                "issue_subscribers",
+                "issue_link",
+            )
+            .order_by("project__identifier", "sequence_id")
+            .distinct()
         )
-        # CSV header

+        # Get the attachments for the issues
+        file_assets = FileAsset.objects.filter(
+            issue_id__in=workspace_issues.values_list("id", flat=True),
+            entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
+        ).annotate(work_item_id=F("issue_id"), asset_id=F("id"))
+
+        # Create a dictionary to store the attachments for the issues
+        attachment_dict = defaultdict(list)
+        for asset in file_assets:
+            attachment_dict[asset.work_item_id].append(asset.asset_id)
+
+        # Create a list to store the issues data
+        issues_data = []
+
+        # Iterate over the issues
+        for issue in workspace_issues:
+            attachments = attachment_dict.get(issue.id, [])
+
+            issue_data = {
+                "id": issue.id,
+                "project_identifier": issue.project.identifier,
+                "project_name": issue.project.name,
+                "project_id": issue.project.id,
+                "sequence_id": issue.sequence_id,
+                "name": issue.name,
+                "description": issue.description_stripped,
+                "priority": issue.priority,
+                "start_date": issue.start_date,
+                "target_date": issue.target_date,
+                "state_name": issue.state.name if issue.state else None,
+                "created_at": issue.created_at,
+                "updated_at": issue.updated_at,
+                "completed_at": issue.completed_at,
+                "archived_at": issue.archived_at,
+                "module_name": [
+                    module.module.name for module in issue.issue_module.all()
+                ],
+                "created_by": get_created_by(issue),
+                "labels": [label.name for label in issue.label_details],
+                "comments": [
+                    {
+                        "comment": comment.comment_stripped,
+                        "created_at": dateConverter(comment.created_at),
+                        "created_by": get_created_by(comment),
+                    }
+                    for comment in issue.issue_comments.all()
+                ],
+                "estimate": issue.estimate_point.value
+                if issue.estimate_point and issue.estimate_point.value
+                else "",
+                "link": [link.url for link in issue.issue_link.all()],
+                "assignees": [
+                    f"{assignee.first_name} {assignee.last_name}"
+                    for assignee in issue.assignee_details
+                ],
+                "subscribers_count": issue.issue_subscribers.count(),
+                "attachment_count": len(attachments),
+                "attachment_links": [
+                    f"/api/assets/v2/workspaces/{issue.workspace.slug}/projects/{issue.project_id}/issues/{issue.id}/attachments/{asset}/"
+                    for asset in attachments
+                ],
+            }
+
+            # Get Cycles data for the issue
+            cycle = issue.issue_cycle.last()
+            if cycle:
+                # Update cycle data
+                issue_data["cycle_name"] = cycle.cycle.name
+                issue_data["cycle_start_date"] = dateConverter(cycle.cycle.start_date)
+                issue_data["cycle_end_date"] = dateConverter(cycle.cycle.end_date)
+            else:
+                issue_data["cycle_name"] = ""
+                issue_data["cycle_start_date"] = ""
+                issue_data["cycle_end_date"] = ""
+
+            issues_data.append(issue_data)
+
+        # CSV header
         header = [
             "ID",
             "Project",
@@ -362,20 +519,25 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, slug):
             "Target Date",
             "Priority",
             "Created By",
-            "Assignee",
             "Labels",
             "Cycle Name",
             "Cycle Start Date",
             "Cycle End Date",
             "Module Name",
-            "Module Start Date",
-            "Module Target Date",
             "Created At",
             "Updated At",
             "Completed At",
             "Archived At",
+            "Comments",
+            "Estimate",
+            "Link",
+            "Assignees",
+            "Subscribers Count",
+            "Attachment Count",
+            "Attachment Links",
         ]

         # Map the provider to the function
         EXPORTER_MAPPER = {
             "csv": generate_csv,
             "json": generate_json,
@@ -384,8 +546,13 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, slug):

         files = []
         if multiple:
+            project_dict = defaultdict(list)
+            for issue in issues_data:
+                project_dict[str(issue["project_id"])].append(issue)
+
             for project_id in project_ids:
-                issues = workspace_issues.filter(project__id=project_id)
+                issues = project_dict.get(str(project_id), [])

                 exporter = EXPORTER_MAPPER.get(provider)
                 if exporter is not None:
                     exporter(header, project_id, issues, files)
@@ -393,7 +560,7 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, slug):
         else:
             exporter = EXPORTER_MAPPER.get(provider)
             if exporter is not None:
-                exporter(header, workspace_id, workspace_issues, files)
+                exporter(header, workspace_id, issues_data, files)

         zip_buffer = create_zip_file(files)
         upload_to_s3(zip_buffer, workspace_id, token_id, slug)
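For reference, the provider dispatch at the end of the task reduces to a small table lookup (values illustrative):

files: list[tuple[str, str | bytes]] = []
exporter = EXPORTER_MAPPER.get("csv")   # -> generate_csv
if exporter is not None:
    exporter(header, "workspace-id", issues_data, files)
zip_buffer = create_zip_file(files)     # files now holds ("<id>.csv", csv_text)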
@@ -63,7 +63,7 @@ def forgot_password(first_name, email, uidb64, token, current_site):
         )
         msg.attach_alternative(html_content, "text/html")
         msg.send()
-        logging.getLogger("plane").info("Email sent successfully")
+        logging.getLogger("plane.worker").info("Email sent successfully")
         return
     except Exception as e:
         log_exception(e)

@@ -53,7 +53,7 @@ def magic_link(email, key, token):
         )
         msg.attach_alternative(html_content, "text/html")
         msg.send()
-        logging.getLogger("plane").info("Email sent successfully.")
+        logging.getLogger("plane.worker").info("Email sent successfully.")
         return
     except Exception as e:
         log_exception(e)

@@ -80,7 +80,7 @@ def project_add_user_email(current_site, project_member_id, invitor_id):
         # Send the email
         msg.send()
         # Log the success
-        logging.getLogger("plane").info("Email sent successfully.")
+        logging.getLogger("plane.worker").info("Email sent successfully.")
         return
     except Exception as e:
         log_exception(e)

@@ -76,7 +76,7 @@ def project_invitation(email, project_id, token, current_site, invitor):

         msg.attach_alternative(html_content, "text/html")
         msg.send()
-        logging.getLogger("plane").info("Email sent successfully.")
+        logging.getLogger("plane.worker").info("Email sent successfully.")
         return
     except (Project.DoesNotExist, ProjectMemberInvite.DoesNotExist):
         return

@@ -58,7 +58,7 @@ def user_activation_email(current_site, user_id):

         msg.attach_alternative(html_content, "text/html")
         msg.send()
-        logging.getLogger("plane").info("Email sent successfully.")
+        logging.getLogger("plane.worker").info("Email sent successfully.")
         return
     except Exception as e:
         log_exception(e)

@@ -60,7 +60,7 @@ def user_deactivation_email(current_site, user_id):
         # Attach HTML content
         msg.attach_alternative(html_content, "text/html")
         msg.send()
-        logging.getLogger("plane").info("Email sent successfully.")
+        logging.getLogger("plane.worker").info("Email sent successfully.")
         return
     except Exception as e:
         log_exception(e)
apiserver/plane/bgtasks/work_item_link_task.py (new file, 177 lines)
@@ -0,0 +1,177 @@
+# Python imports
+import logging
+
+
+# Third party imports
+from celery import shared_task
+import requests
+from bs4 import BeautifulSoup
+from urllib.parse import urlparse, urljoin
+import base64
+import ipaddress
+from typing import Dict, Any
+from typing import Optional
+from plane.db.models import IssueLink
+from plane.utils.exception_logger import log_exception
+
+logger = logging.getLogger("plane.worker")
+
+
+DEFAULT_FAVICON = "PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIyNCIgaGVpZ2h0PSIyNCIgdmlld0JveD0iMCAwIDI0IDI0IiBmaWxsPSJub25lIiBzdHJva2U9ImN1cnJlbnRDb2xvciIgc3Ryb2tlLXdpZHRoPSIyIiBzdHJva2UtbGluZWNhcD0icm91bmQiIHN0cm9rZS1saW5lam9pbj0icm91bmQiIGNsYXNzPSJsdWNpZGUgbHVjaWRlLWxpbmstaWNvbiBsdWNpZGUtbGluayI+PHBhdGggZD0iTTEwIDEzYTUgNSAwIDAgMCA3LjU0LjU0bDMtM2E1IDUgMCAwIDAtNy4wNy03LjA3bC0xLjcyIDEuNzEiLz48cGF0aCBkPSJNMTQgMTFhNSA1IDAgMCAwLTcuNTQtLjU0bC0zIDNhNSA1IDAgMCAwIDcuMDcgNy4wN2wxLjcxLTEuNzEiLz48L3N2Zz4=" # noqa: E501
+
+
+def crawl_work_item_link_title_and_favicon(url: str) -> Dict[str, Any]:
+    """
+    Crawls a URL to extract the title and favicon.
+
+    Args:
+        url (str): The URL to crawl
+
+    Returns:
+        dict: Title, base64-encoded favicon, and the related URLs
+    """
+    try:
+        # Prevent access to private IP ranges
+        parsed = urlparse(url)
+
+        try:
+            ip = ipaddress.ip_address(parsed.hostname)
+            if ip.is_private or ip.is_loopback or ip.is_reserved:
+                raise ValueError("Access to private/internal networks is not allowed")
+        except ValueError:
+            # Not an IP address, continue with domain validation
+            pass
+
+        # Set up headers to mimic a real browser
+        headers = {
+            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"  # noqa: E501
+        }
+
+        soup = None
+        title = None
+
+        try:
+            response = requests.get(url, headers=headers, timeout=1)
+
+            soup = BeautifulSoup(response.content, "html.parser")
+            title_tag = soup.find("title")
+            title = title_tag.get_text().strip() if title_tag else None
+
+        except requests.RequestException as e:
+            logger.warning(f"Failed to fetch HTML for title: {str(e)}")
+
+        # Fetch and encode favicon
+        favicon_base64 = fetch_and_encode_favicon(headers, soup, url)
+
+        # Prepare result
+        result = {
+            "title": title,
+            "favicon": favicon_base64["favicon_base64"],
+            "url": url,
+            "favicon_url": favicon_base64["favicon_url"],
+        }
+
+        return result
+
+    except Exception as e:
+        log_exception(e)
+        return {
+            "error": f"Unexpected error: {str(e)}",
+            "title": None,
+            "favicon": None,
+            "url": url,
+        }
+
+
+def find_favicon_url(soup: Optional[BeautifulSoup], base_url: str) -> Optional[str]:
+    """
+    Find the favicon URL from HTML soup.
+
+    Args:
+        soup: BeautifulSoup object
+        base_url: Base URL for resolving relative paths
+
+    Returns:
+        str: Absolute URL to favicon or None
+    """
+    if soup is not None:
+        # Look for various favicon link tags
+        favicon_selectors = [
+            'link[rel="icon"]',
+            'link[rel="shortcut icon"]',
+            'link[rel="apple-touch-icon"]',
+            'link[rel="apple-touch-icon-precomposed"]',
+        ]
+
+        for selector in favicon_selectors:
+            favicon_tag = soup.select_one(selector)
+            if favicon_tag and favicon_tag.get("href"):
+                return urljoin(base_url, favicon_tag["href"])
+
+        # Fallback to /favicon.ico
+        parsed_url = urlparse(base_url)
+        fallback_url = f"{parsed_url.scheme}://{parsed_url.netloc}/favicon.ico"
+
+        # Check if fallback exists
+        try:
+            response = requests.head(fallback_url, timeout=2)
+            if response.status_code == 200:
+                return fallback_url
+        except requests.RequestException as e:
+            log_exception(e)
+            return None
+
+    return None
+
+
+def fetch_and_encode_favicon(
+    headers: Dict[str, str], soup: Optional[BeautifulSoup], url: str
+) -> Dict[str, Optional[str]]:
+    """
+    Fetch favicon and encode it as base64.
+
+    Args:
+        headers: Request headers
+        soup: BeautifulSoup object of the fetched page, if any
+        url: Page URL used to locate the favicon
+
+    Returns:
+        dict: Favicon URL and a base64 data URI for the favicon (or a default)
+    """
+    try:
+        favicon_url = find_favicon_url(soup, url)
+        if favicon_url is None:
+            return {
+                "favicon_url": None,
+                "favicon_base64": f"data:image/svg+xml;base64,{DEFAULT_FAVICON}",
+            }
+
+        response = requests.get(favicon_url, headers=headers, timeout=1)
+
+        # Get content type
+        content_type = response.headers.get("content-type", "image/x-icon")
+
+        # Convert to base64
+        favicon_base64 = base64.b64encode(response.content).decode("utf-8")
+
+        # Return as data URI
+        return {
+            "favicon_url": favicon_url,
+            "favicon_base64": f"data:{content_type};base64,{favicon_base64}",
+        }
+
+    except Exception as e:
+        logger.warning(f"Failed to fetch favicon: {e}")
+        return {
+            "favicon_url": None,
+            "favicon_base64": f"data:image/svg+xml;base64,{DEFAULT_FAVICON}",
+        }
+
+
+@shared_task
+def crawl_work_item_link_title(id: str, url: str) -> None:
+    meta_data = crawl_work_item_link_title_and_favicon(url)
+    issue_link = IssueLink.objects.get(id=id)
+
+    issue_link.metadata = meta_data
+
+    issue_link.save()
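A hypothetical enqueue site for the new task (not part of this diff): the link is created first, then the title and favicon are filled in asynchronously.

link = IssueLink.objects.create(issue=issue, url="https://example.com/docs")
crawl_work_item_link_title.delay(str(link.id), link.url)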
@@ -78,7 +78,7 @@ def workspace_invitation(email, workspace_id, token, current_site, inviter):
         )
         msg.attach_alternative(html_content, "text/html")
         msg.send()
-        logging.getLogger("plane").info("Email sent successfully")
+        logging.getLogger("plane.worker").info("Email sent successfully")
         return
     except (Workspace.DoesNotExist, WorkspaceMemberInvite.DoesNotExist):
         return
@@ -5,7 +5,9 @@ from plane.db.models import Workspace


 class Command(BaseCommand):
-    help = "Updates the slug of a soft-deleted workspace by appending the epoch timestamp"
+    help = (
+        "Updates the slug of a soft-deleted workspace by appending the epoch timestamp"
+    )

     def add_arguments(self, parser):
         parser.add_argument(

@@ -75,4 +77,4 @@ class Command(BaseCommand):
                     self.style.ERROR(
                         f"Error updating workspace '{workspace.name}': {str(e)}"
-                )
+                    )
                 )
@@ -0,0 +1,23 @@
+# Generated by Django 4.2.14 on 2025-05-09 11:31
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('db', '0094_auto_20250425_0902'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='page',
+            name='external_id',
+            field=models.CharField(blank=True, max_length=255, null=True),
+        ),
+        migrations.AddField(
+            model_name='page',
+            name='external_source',
+            field=models.CharField(blank=True, max_length=255, null=True),
+        ),
+    ]
@@ -0,0 +1,23 @@
+# Generated by Django 4.2.20 on 2025-05-21 13:43
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("db", "0095_page_external_id_page_external_source"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="user",
+            name="is_email_valid",
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name="user",
+            name="masked_at",
+            field=models.DateTimeField(null=True),
+        ),
+    ]
@@ -0,0 +1,23 @@
+# Generated by Django 4.2.21 on 2025-06-06 12:33
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('db', '0096_user_is_email_valid_user_masked_at'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='project',
+            name='external_id',
+            field=models.CharField(blank=True, max_length=255, null=True),
+        ),
+        migrations.AddField(
+            model_name='project',
+            name='external_source',
+            field=models.CharField(blank=True, max_length=255, null=True),
+        ),
+    ]
@@ -82,4 +82,4 @@ from .label import Label

 from .device import Device, DeviceSession

-from .sticky import Sticky
\ No newline at end of file
+from .sticky import Sticky
@@ -18,22 +18,28 @@ class BaseModel(AuditModel):
     class Meta:
         abstract = True

-    def save(self, *args, **kwargs):
-        user = get_current_user()
+    def save(self, *args, created_by_id=None, disable_auto_set_user=False, **kwargs):
+        if not disable_auto_set_user:
+            # Check if created_by_id is provided
+            if created_by_id:
+                self.created_by_id = created_by_id
+            else:
+                user = get_current_user()

-        if user is None or user.is_anonymous:
-            self.created_by = None
-            self.updated_by = None
-            super(BaseModel, self).save(*args, **kwargs)
-        else:
-            # Check if the model is being created or updated
-            if self._state.adding:
-                # If created only set created_by value: set updated_by to None
-                self.created_by = user
-                self.updated_by = None
-            # If updated only set updated_by value don't touch created_by
-            self.updated_by = user
-            super(BaseModel, self).save(*args, **kwargs)
+                if user is None or user.is_anonymous:
+                    self.created_by = None
+                    self.updated_by = None
+                else:
+                    # Check if the model is being created or updated
+                    if self._state.adding:
+                        # If creating, set created_by and leave updated_by as None
+                        self.created_by = user
+                        self.updated_by = None
+                    else:
+                        # If updating, set updated_by only
+                        self.updated_by = user
+
+        super(BaseModel, self).save(*args, **kwargs)

     def __str__(self):
         return str(self.id)
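A sketch of how callers can use the new keyword arguments (model and variable names illustrative):

issue.save()                                # audit fields set from the request user
issue.save(created_by_id=importer_user_id)  # e.g. background imports on create
issue.save(disable_auto_set_user=True)      # leave created_by/updated_by untouched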
@@ -17,6 +17,11 @@ def get_view_props():


 class Page(BaseModel):
+    PRIVATE_ACCESS = 1
+    PUBLIC_ACCESS = 0
+
+    ACCESS_CHOICES = ((PRIVATE_ACCESS, "Private"), (PUBLIC_ACCESS, "Public"))
+
     workspace = models.ForeignKey(
         "db.Workspace", on_delete=models.CASCADE, related_name="pages"
     )

@@ -53,6 +58,9 @@ class Page(BaseModel):
     moved_to_page = models.UUIDField(null=True, blank=True)
     moved_to_project = models.UUIDField(null=True, blank=True)

+    external_id = models.CharField(max_length=255, null=True, blank=True)
+    external_source = models.CharField(max_length=255, null=True, blank=True)
+
     class Meta:
         verbose_name = "Page"
         verbose_name_plural = "Pages"

@@ -91,9 +99,7 @@ class PageLog(BaseModel):
     transaction = models.UUIDField(default=uuid.uuid4)
     page = models.ForeignKey(Page, related_name="page_log", on_delete=models.CASCADE)
     entity_identifier = models.UUIDField(null=True)
-    entity_name = models.CharField(
-        max_length=30, verbose_name="Transaction Type"
-    )
+    entity_name = models.CharField(max_length=30, verbose_name="Transaction Type")
     workspace = models.ForeignKey(
         "db.Workspace", on_delete=models.CASCADE, related_name="workspace_page_log"
     )
@@ -122,6 +122,9 @@ class Project(BaseModel):
     # timezone
     TIMEZONE_CHOICES = tuple(zip(pytz.all_timezones, pytz.all_timezones))
     timezone = models.CharField(max_length=255, default="UTC", choices=TIMEZONE_CHOICES)
+    # external_id for imports
+    external_source = models.CharField(max_length=255, null=True, blank=True)
+    external_id = models.CharField(max_length=255, blank=True, null=True)

     @property
     def cover_image_url(self):
@@ -106,6 +106,12 @@ class User(AbstractBaseUser, PermissionsMixin):
         max_length=255, default="UTC", choices=USER_TIMEZONE_CHOICES
     )

+    # email validation
+    is_email_valid = models.BooleanField(default=False)
+
+    # masking
+    masked_at = models.DateTimeField(null=True)
+
     USERNAME_FIELD = "email"
     REQUIRED_FIELDS = ["username"]
@@ -153,12 +153,8 @@ class Workspace(BaseModel):
         return None

-    def delete(
-        self,
-        using: Optional[str] = None,
-        soft: bool = True,
-        *args: Any,
-        **kwargs: Any
-    ):
+    def delete(
+        self, using: Optional[str] = None, soft: bool = True, *args: Any, **kwargs: Any
+    ):
         """
         Override the delete method to append epoch timestamp to the slug when soft deleting.

@@ -172,7 +168,7 @@ class Workspace(BaseModel):
         result = super().delete(using=using, soft=soft, *args, **kwargs)

         # If it's a soft delete and the model still exists (not hard deleted)
-        if soft and hasattr(self, 'deleted_at') and self.deleted_at:
+        if soft and hasattr(self, "deleted_at") and self.deleted_at:
             # Use the deleted_at timestamp to update the slug
             deletion_timestamp: int = int(self.deleted_at.timestamp())
             self.slug = f"{self.slug}__{deletion_timestamp}"
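A quick illustration of the slug rewrite (values illustrative; assumes the model persists the new slug after delete()):

ws = Workspace.objects.get(slug="acme")
ws.delete()   # soft delete sets deleted_at
ws.slug       # -> "acme__1746790260", freeing "acme" for immediate reuse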
@@ -57,7 +57,7 @@ class InstanceEndpoint(BaseAPIView):
             POSTHOG_API_KEY,
             POSTHOG_HOST,
             UNSPLASH_ACCESS_KEY,
-            OPENAI_API_KEY,
+            LLM_API_KEY,
             IS_INTERCOM_ENABLED,
             INTERCOM_APP_ID,
         ) = get_configuration_value(

@@ -112,8 +112,8 @@ class InstanceEndpoint(BaseAPIView):
                 "default": os.environ.get("UNSPLASH_ACCESS_KEY", ""),
             },
             {
-                "key": "OPENAI_API_KEY",
-                "default": os.environ.get("OPENAI_API_KEY", ""),
+                "key": "LLM_API_KEY",
+                "default": os.environ.get("LLM_API_KEY", ""),
             },
             # Intercom settings
             {

@@ -151,7 +151,7 @@ class InstanceEndpoint(BaseAPIView):
         data["has_unsplash_configured"] = bool(UNSPLASH_ACCESS_KEY)

         # Open AI settings
-        data["has_openai_configured"] = bool(OPENAI_API_KEY)
+        data["has_llm_configured"] = bool(LLM_API_KEY)

         # File size settings
         data["file_size_limit"] = float(os.environ.get("FILE_SIZE_LIMIT", 5242880))

@@ -157,7 +157,7 @@ class Command(BaseCommand):
             },
             # Deprecated, use LLM_MODEL
             {
-            "key": "GPT_ENGINE",
+                "key": "GPT_ENGINE",
                 "value": os.environ.get("GPT_ENGINE", "gpt-3.5-turbo"),
                 "category": "SMTP",
                 "is_encrypted": False,
@@ -83,6 +83,32 @@ class APITokenLogMiddleware:
         self.process_request(request, response, request_body)
         return response

+    def _safe_decode_body(self, content):
+        """
+        Safely decodes request/response body content, handling binary data.
+        Returns None if content is None, or a string representation of the content.
+        """
+        # If the content is None, return None
+        if content is None:
+            return None
+
+        # If the content is an empty bytes object, return None
+        if content == b"":
+            return None
+
+        # Check if content is binary by looking for common binary file signatures
+        if (
+            content.startswith(b"\x89PNG")
+            or content.startswith(b"\xff\xd8\xff")
+            or content.startswith(b"%PDF")
+        ):
+            return "[Binary Content]"
+
+        try:
+            return content.decode("utf-8")
+        except UnicodeDecodeError:
+            return "[Could not decode content]"
+
     def process_request(self, request, response, request_body):
         api_key_header = "X-Api-Key"
         api_key = request.headers.get(api_key_header)

@@ -95,9 +121,13 @@ class APITokenLogMiddleware:
             method=request.method,
             query_params=request.META.get("QUERY_STRING", ""),
             headers=str(request.headers),
-            body=(request_body.decode("utf-8") if request_body else None),
+            body=(
+                self._safe_decode_body(request_body) if request_body else None
+            ),
             response_body=(
-                response.content.decode("utf-8") if response.content else None
+                self._safe_decode_body(response.content)
+                if response.content
+                else None
             ),
             response_code=response.status_code,
             ip_address=get_client_ip(request=request),
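What _safe_decode_body returns for common payloads, per the branches above (the middleware wiring here is hypothetical):

mw = APITokenLogMiddleware(get_response=lambda request: request)
mw._safe_decode_body(None)                      # -> None
mw._safe_decode_body(b'{"ok": true}')           # -> '{"ok": true}'
mw._safe_decode_body(b"\x89PNG\r\n\x1a\n...")   # -> "[Binary Content]" (PNG signature)
mw._safe_decode_body(b"\xff\xfe\xfd")           # -> "[Could not decode content]"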
@@ -3,7 +3,7 @@
 # Python imports
 import os
-from urllib.parse import urlparse
+from urllib.parse import urljoin

 # Third party imports
 import dj_database_url

@@ -13,6 +13,10 @@ from django.core.management.utils import get_random_secret_key
 from corsheaders.defaults import default_headers

+
+# Module imports
+from plane.utils.url import is_valid_url
+
 BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

 # Secret Key

@@ -310,15 +314,35 @@ CSRF_TRUSTED_ORIGINS = cors_allowed_origins
 CSRF_COOKIE_DOMAIN = os.environ.get("COOKIE_DOMAIN", None)
 CSRF_FAILURE_VIEW = "plane.authentication.views.common.csrf_failure"

-# Base URLs
+###### Base URLs ######
+
+# Admin Base URL
 ADMIN_BASE_URL = os.environ.get("ADMIN_BASE_URL", None)
-ADMIN_BASE_PATH = os.environ.get("ADMIN_BASE_PATH", None)
+if ADMIN_BASE_URL and not is_valid_url(ADMIN_BASE_URL):
+    ADMIN_BASE_URL = None
+ADMIN_BASE_PATH = os.environ.get("ADMIN_BASE_PATH", "/god-mode/")

+# Space Base URL
 SPACE_BASE_URL = os.environ.get("SPACE_BASE_URL", None)
-SPACE_BASE_PATH = os.environ.get("SPACE_BASE_PATH", None)
-APP_BASE_URL = os.environ.get("APP_BASE_URL")
-APP_BASE_PATH = os.environ.get("APP_BASE_PATH", None)
-LIVE_BASE_URL = os.environ.get("LIVE_BASE_URL")
-LIVE_BASE_PATH = os.environ.get("LIVE_BASE_PATH")
+if SPACE_BASE_URL and not is_valid_url(SPACE_BASE_URL):
+    SPACE_BASE_URL = None
+SPACE_BASE_PATH = os.environ.get("SPACE_BASE_PATH", "/spaces/")

+# App Base URL
+APP_BASE_URL = os.environ.get("APP_BASE_URL", None)
+if APP_BASE_URL and not is_valid_url(APP_BASE_URL):
+    APP_BASE_URL = None
+APP_BASE_PATH = os.environ.get("APP_BASE_PATH", "/")
+
+# Live Base URL
+LIVE_BASE_URL = os.environ.get("LIVE_BASE_URL", None)
+if LIVE_BASE_URL and not is_valid_url(LIVE_BASE_URL):
+    LIVE_BASE_URL = None
+LIVE_BASE_PATH = os.environ.get("LIVE_BASE_PATH", "/live/")
+
+LIVE_URL = urljoin(LIVE_BASE_URL, LIVE_BASE_PATH) if LIVE_BASE_URL else None

 # WEB URL
 WEB_URL = os.environ.get("WEB_URL")

 HARD_DELETE_AFTER_DAYS = int(os.environ.get("HARD_DELETE_AFTER_DAYS", 60))
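With the defaults above, LIVE_URL composes via urljoin; for example (hostname illustrative):

from urllib.parse import urljoin
urljoin("https://plane.example.com", "/live/")  # -> "https://plane.example.com/live/"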
@@ -32,7 +32,6 @@ class S3Storage(S3Boto3Storage):
         ) or os.environ.get("MINIO_ENDPOINT_URL")

         if os.environ.get("USE_MINIO") == "1":
-
             # Determine protocol based on environment variable
             if os.environ.get("MINIO_ENDPOINT_SSL") == "1":
                 endpoint_protocol = "https"
@@ -135,7 +135,7 @@ def issue_on_results(
                 default=None,
                 output_field=JSONField(),
             ),
-            filter=Q(votes__isnull=False,votes__deleted_at__isnull=True),
+            filter=Q(votes__isnull=False, votes__deleted_at__isnull=True),
             distinct=True,
         ),
         reaction_items=ArrayAgg(

@@ -169,7 +169,9 @@ def issue_on_results(
                 default=None,
                 output_field=JSONField(),
             ),
-            filter=Q(issue_reactions__isnull=False, issue_reactions__deleted_at__isnull=True),
+            filter=Q(
+                issue_reactions__isnull=False, issue_reactions__deleted_at__isnull=True
+            ),
             distinct=True,
         ),
     ).values(*required_fields, "vote_items", "reaction_items")
@@ -21,6 +21,7 @@ from plane.app.serializers import (
 )
 from plane.utils.issue_filters import issue_filters
 from plane.bgtasks.issue_activities_task import issue_activity
+from plane.db.models.intake import SourceType


 class IntakeIssuePublicViewSet(BaseViewSet):

@@ -156,7 +157,7 @@ class IntakeIssuePublicViewSet(BaseViewSet):
             intake_id=intake_id,
             project_id=project_deploy_board.project_id,
             issue=issue,
-            source=request.data.get("source", "IN-APP"),
+            source=SourceType.IN_APP,
         )

         serializer = IssueStateIntakeSerializer(issue)
@@ -179,7 +179,7 @@ class ProjectIssuesPublicEndpoint(BaseAPIView):
                     Q(issue_intake__status=1)
                     | Q(issue_intake__status=-1)
                     | Q(issue_intake__status=2)
-                    | Q(issue_intake__status=True),
+                    | Q(issue_intake__isnull=True),
                     archived_at__isnull=True,
                     is_draft=False,
                 ),

@@ -205,7 +205,7 @@ class ProjectIssuesPublicEndpoint(BaseAPIView):
                     Q(issue_intake__status=1)
                     | Q(issue_intake__status=-1)
                     | Q(issue_intake__status=2)
-                    | Q(issue_intake__status=True),
+                    | Q(issue_intake__isnull=True),
                     archived_at__isnull=True,
                     is_draft=False,
                 ),
@@ -14,9 +14,7 @@ class ProjectMetaDataEndpoint(BaseAPIView):

     def get(self, request, anchor):
         try:
-            deploy_board = DeployBoard.objects.get(
-                anchor=anchor, entity_name="project"
-            )
+            deploy_board = DeployBoard.objects.get(anchor=anchor, entity_name="project")
         except DeployBoard.DoesNotExist:
             return Response(
                 {"error": "Project is not published"}, status=status.HTTP_404_NOT_FOUND
Some files were not shown because too many files have changed in this diff.