Mirror of https://github.com/makeplane/plane, synced 2025-08-07 19:59:33 +00:00

Compare commits: chore/page ... refactor/p (94 commits)
| Author | SHA1 | Date |
|---|---|---|
| | cf73ba87d8 | |
| | a5a4cde1d7 | |
| | 05de4d83f3 | |
| | 423bc15119 | |
| | 666d35afb9 | |
| | 7ac07b7b73 | |
| | a88f2e3cba | |
| | d3556f457b | |
| | c880e8b48c | |
| | f93803ace8 | |
| | a04fb07406 | |
| | 5d807db69e | |
| | 59fdd611e4 | |
| | fb2b4ae303 | |
| | de8da176d3 | |
| | 17ce1bceb6 | |
| | 1561b710ca | |
| | 9a4971efa4 | |
| | 2331404d46 | |
| | a23c528396 | |
| | dee57326a5 | |
| | 15918f2d9f | |
| | 8b6a48f05c | |
| | 1c849103f9 | |
| | cdb932ab67 | |
| | b1c7e6ae20 | |
| | f5656111ee | |
| | 51758b774e | |
| | d31aaee32c | |
| | 9af9268be6 | |
| | c18a6a9654 | |
| | 282597bf83 | |
| | b24e530816 | |
| | ca9f3f2f5a | |
| | 028e70c4c1 | |
| | 30fdc1015c | |
| | 272428b05e | |
| | 911832d546 | |
| | 249e71e424 | |
| | 52d8d6e7ce | |
| | 93a22034bd | |
| | 453459d271 | |
| | 8c5f693214 | |
| | 4d17616670 | |
| | c190bf3a6f | |
| | e503c901ae | |
| | 188f8ff83f | |
| | 77b73dc032 | |
| | 20acb0dd17 | |
| | 7e66e2736b | |
| | cad55f3234 | |
| | 87582604f7 | |
| | dd65d03d33 | |
| | 97eea75cb7 | |
| | ddfd953408 | |
| | a428bc16c4 | |
| | 5322c0e57b | |
| | 81dfc15d1f | |
| | 6a00fcc253 | |
| | f96e76dbbc | |
| | de7dad59f0 | |
| | 1c901446ab | |
| | e7d6e7d575 | |
| | a2cdbd52dc | |
| | 608e193c36 | |
| | 830f0861c1 | |
| | 98ebe88c86 | |
| | c8c86a33f8 | |
| | ba4798deb9 | |
| | 463d0732e9 | |
| | a8184c366a | |
| | 0a105a1c21 | |
| | bf4f97d7f6 | |
| | a9d9cbcb72 | |
| | 092e65b43d | |
| | fc4ba5a170 | |
| | 9143e5abc8 | |
| | 1cb26fa863 | |
| | 9ff3c22089 | |
| | 653b1a7b30 | |
| | d27590cd49 | |
| | 3cbc1dcf10 | |
| | 4d9cd0c318 | |
| | 87de913c76 | |
| | b016e1d1b5 | |
| | 67bd14ceb5 | |
| | 4091e61953 | |
| | ade6eded69 | |
| | 061a447734 | |
| | 10ef4e657f | |
| | 8a30c2c484 | |
| | 6636a64817 | |
| | 571a3d1239 | |
| | 49e65fbcb3 | |
.github/workflows/build-aio-base.yml (vendored, 4 changes)

@@ -45,7 +45,7 @@ jobs:
     runs-on: ubuntu-latest
     needs: [base_build_setup]
     env:
-      BASE_IMG_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-aio-base:${{ needs.base_build_setup.outputs.gh_branch_name }}
+      BASE_IMG_TAG: makeplane/plane-aio-base:${{ needs.base_build_setup.outputs.gh_branch_name }}
       TARGET_BRANCH: ${{ needs.base_build_setup.outputs.gh_branch_name }}
       BUILDX_DRIVER: ${{ needs.base_build_setup.outputs.gh_buildx_driver }}
       BUILDX_VERSION: ${{ needs.base_build_setup.outputs.gh_buildx_version }}
@@ -58,7 +58,7 @@ jobs:
       - name: Set Docker Tag
         run: |
           if [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-aio-base:latest
+            TAG=makeplane/plane-aio-base:latest
           else
             TAG=${{ env.BASE_IMG_TAG }}
           fi
.github/workflows/build-branch.yml (vendored, 32 changes)

@@ -14,7 +14,7 @@ env:
 
 jobs:
   branch_build_setup:
-    name: Build-Push Web/Space/API/Proxy Docker Image
+    name: Build Setup
     runs-on: ubuntu-latest
     outputs:
       gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
@@ -85,7 +85,7 @@ jobs:
     runs-on: ubuntu-20.04
     needs: [branch_build_setup]
     env:
-      FRONTEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+      FRONTEND_TAG: makeplane/plane-frontend:${{ needs.branch_build_setup.outputs.gh_branch_name }}
       TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
       BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
       BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
@@ -95,9 +95,9 @@ jobs:
       - name: Set Frontend Docker Tag
         run: |
           if [ "${{ github.event_name }}" == "release" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ github.event.release.tag_name }}
+            TAG=makeplane/plane-frontend:stable,makeplane/plane-frontend:${{ github.event.release.tag_name }}
           elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:latest
+            TAG=makeplane/plane-frontend:latest
           else
             TAG=${{ env.FRONTEND_TAG }}
           fi
@@ -137,7 +137,7 @@ jobs:
     runs-on: ubuntu-20.04
     needs: [branch_build_setup]
     env:
-      ADMIN_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-admin:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+      ADMIN_TAG: makeplane/plane-admin:${{ needs.branch_build_setup.outputs.gh_branch_name }}
       TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
       BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
       BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
@@ -147,9 +147,9 @@ jobs:
       - name: Set Admin Docker Tag
         run: |
           if [ "${{ github.event_name }}" == "release" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-admin:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-admin:${{ github.event.release.tag_name }}
+            TAG=makeplane/plane-admin:stable,makeplane/plane-admin:${{ github.event.release.tag_name }}
           elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-admin:latest
+            TAG=makeplane/plane-admin:latest
           else
             TAG=${{ env.ADMIN_TAG }}
           fi
@@ -189,7 +189,7 @@ jobs:
     runs-on: ubuntu-20.04
     needs: [branch_build_setup]
     env:
-      SPACE_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+      SPACE_TAG: makeplane/plane-space:${{ needs.branch_build_setup.outputs.gh_branch_name }}
       TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
       BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
       BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
@@ -199,9 +199,9 @@ jobs:
      - name: Set Space Docker Tag
         run: |
           if [ "${{ github.event_name }}" == "release" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ github.event.release.tag_name }}
+            TAG=makeplane/plane-space:stable,makeplane/plane-space:${{ github.event.release.tag_name }}
           elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:latest
+            TAG=makeplane/plane-space:latest
           else
             TAG=${{ env.SPACE_TAG }}
           fi
@@ -241,7 +241,7 @@ jobs:
     runs-on: ubuntu-20.04
     needs: [branch_build_setup]
     env:
-      BACKEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+      BACKEND_TAG: makeplane/plane-backend:${{ needs.branch_build_setup.outputs.gh_branch_name }}
       TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
       BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
       BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
@@ -251,9 +251,9 @@ jobs:
       - name: Set Backend Docker Tag
         run: |
           if [ "${{ github.event_name }}" == "release" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ github.event.release.tag_name }}
+            TAG=makeplane/plane-backend:stable,makeplane/plane-backend:${{ github.event.release.tag_name }}
           elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:latest
+            TAG=makeplane/plane-backend:latest
           else
             TAG=${{ env.BACKEND_TAG }}
           fi
@@ -293,7 +293,7 @@ jobs:
     runs-on: ubuntu-20.04
     needs: [branch_build_setup]
     env:
-      PROXY_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+      PROXY_TAG: makeplane/plane-proxy:${{ needs.branch_build_setup.outputs.gh_branch_name }}
       TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
       BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
       BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
@@ -303,9 +303,9 @@ jobs:
       - name: Set Proxy Docker Tag
         run: |
           if [ "${{ github.event_name }}" == "release" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ github.event.release.tag_name }}
+            TAG=makeplane/plane-proxy:stable,makeplane/plane-proxy:${{ github.event.release.tag_name }}
           elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:latest
+            TAG=makeplane/plane-proxy:latest
           else
             TAG=${{ env.PROXY_TAG }}
           fi
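Note: every image job above repeats the same three-way tag decision — release events get `stable` plus the release tag, `master` gets `latest`, and any other branch gets a branch-named tag — with the hardcoded `makeplane` org replacing the `DOCKERHUB_USERNAME` secret throughout. A minimal sketch of that selection logic, in Python for illustration (the function and its names are ours, not part of the workflow):

```python
def docker_tags(image: str, event_name: str, branch: str, release_tag: str = "") -> str:
    """Mirrors the shell logic in each Set-*-Docker-Tag step above."""
    org = "makeplane"  # previously ${{ secrets.DOCKERHUB_USERNAME }}
    if event_name == "release" and release_tag:
        return f"{org}/{image}:stable,{org}/{image}:{release_tag}"
    if branch == "master":
        return f"{org}/{image}:latest"
    return f"{org}/{image}:{branch}"

# docker_tags("plane-frontend", "release", "master", "v0.21.0")
#   -> "makeplane/plane-frontend:stable,makeplane/plane-frontend:v0.21.0"
# docker_tags("plane-backend", "push", "preview")
#   -> "makeplane/plane-backend:preview"
```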
@@ -3,10 +3,11 @@ name: Build and Lint on Pull Request
 on:
   workflow_dispatch:
   pull_request:
-    types: ["opened", "synchronize"]
+    types: ["opened", "synchronize", "ready_for_review"]
 
 jobs:
   get-changed-files:
+    if: github.event.pull_request.draft == false
     runs-on: ubuntu-latest
     outputs:
       apiserver_changed: ${{ steps.changed-files.outputs.apiserver_any_changed }}
.github/workflows/create-sync-pr.yml (vendored, 2 changes)

@@ -29,7 +29,7 @@ jobs:
       else
         echo "MATCH=false" >> $GITHUB_OUTPUT
       fi
-  Auto_Merge:
+  Create_PR:
     if: ${{ needs.Check_Branch.outputs.BRANCH_MATCH == 'true' }}
     needs: [Check_Branch]
     runs-on: ubuntu-latest
.github/workflows/repo-sync.yml (vendored, 2 changes)

@@ -4,7 +4,7 @@ on:
   workflow_dispatch:
   push:
     branches:
-      - preview
+      - develop
 
 env:
   SOURCE_BRANCH_NAME: ${{ github.ref_name }}
@@ -1,3 +1,4 @@
+"use client";
 import { FC } from "react";
 import { useForm } from "react-hook-form";
 import { Lightbulb } from "lucide-react";
@@ -1,3 +1,5 @@
+"use client";
+
 import { FC, useState } from "react";
 import isEmpty from "lodash/isEmpty";
 import Link from "next/link";
@@ -1,3 +1,4 @@
+"use client";
 import { FC, useState } from "react";
 import isEmpty from "lodash/isEmpty";
 import Link from "next/link";
@@ -7,12 +7,12 @@ import { useTheme } from "next-themes";
|
||||
import useSWR from "swr";
|
||||
import { Mails, KeyRound } from "lucide-react";
|
||||
import { TInstanceConfigurationKeys } from "@plane/types";
|
||||
import { Loader, setPromiseToast } from "@plane/ui";
|
||||
import { Loader, ToggleSwitch, setPromiseToast } from "@plane/ui";
|
||||
// components
|
||||
import { PageHeader } from "@/components/core";
|
||||
// hooks
|
||||
// helpers
|
||||
import { resolveGeneralTheme } from "@/helpers/common.helper";
|
||||
import { cn, resolveGeneralTheme } from "@/helpers/common.helper";
|
||||
import { useInstance } from "@/hooks/store";
|
||||
// images
|
||||
import githubLightModeImage from "@/public/logos/github-black.png";
|
||||
@@ -45,6 +45,8 @@ const InstanceAuthenticationPage = observer(() => {
|
||||
const [isSubmitting, setIsSubmitting] = useState<boolean>(false);
|
||||
// theme
|
||||
const { resolvedTheme } = useTheme();
|
||||
// derived values
|
||||
const enableSignUpConfig = formattedConfig?.ENABLE_SIGNUP ?? "";
|
||||
|
||||
const updateConfig = async (key: TInstanceConfigurationKeys, value: string) => {
|
||||
setIsSubmitting(true);
|
||||
@@ -129,7 +131,34 @@ const InstanceAuthenticationPage = observer(() => {
|
||||
<div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
|
||||
{formattedConfig ? (
|
||||
<div className="space-y-3">
|
||||
<div className="text-lg font-medium">Authentication modes</div>
|
||||
<div className="text-lg font-medium pb-1">Sign-up configuration</div>
|
||||
<div className={cn("w-full flex items-center gap-14 rounded")}>
|
||||
<div className="flex grow items-center gap-4">
|
||||
<div className="grow">
|
||||
<div className={cn("font-medium leading-5 text-custom-text-100 text-sm")}>
|
||||
Allow anyone to sign up without invite
|
||||
</div>
|
||||
<div className={cn("font-normal leading-5 text-custom-text-300 text-xs")}>
|
||||
Toggling this off will disable self sign ups.
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className={`shrink-0 pr-4 ${isSubmitting && "opacity-70"}`}>
|
||||
<div className="flex items-center gap-4">
|
||||
<ToggleSwitch
|
||||
value={Boolean(parseInt(enableSignUpConfig))}
|
||||
onChange={() => {
|
||||
Boolean(parseInt(enableSignUpConfig)) === true
|
||||
? updateConfig("ENABLE_SIGNUP", "0")
|
||||
: updateConfig("ENABLE_SIGNUP", "1");
|
||||
}}
|
||||
size="sm"
|
||||
disabled={isSubmitting}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="text-lg font-medium pt-6">Authentication modes</div>
|
||||
{authenticationMethodsCard.map((method) => (
|
||||
<AuthenticationMethodCard
|
||||
key={method.key}
|
||||
|
||||
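The toggle above round-trips `ENABLE_SIGNUP` as the string `"0"` or `"1"`: `Boolean(parseInt(...))` treats an empty or unset value as false, and the `onChange` handler writes back the opposite value. The same flip, sketched in Python (helper name is ours):

```python
def flip_signup_flag(current: str) -> str:
    # "1" -> enabled; "0" or "" (unset) -> disabled, mirroring Boolean(parseInt(...))
    enabled = current.isdigit() and bool(int(current))
    return "0" if enabled else "1"

assert flip_signup_flag("1") == "0"
assert flip_signup_flag("0") == "1"
assert flip_signup_flag("") == "1"  # unset behaves like disabled
```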
@@ -1,3 +1,5 @@
+"use client";
+
 import React, { FC, useMemo, useState } from "react";
 import { useForm } from "react-hook-form";
 // types
@@ -10,6 +10,6 @@ export const metadata: Metadata = {
   title: "Email Settings - God Mode",
 };
 
-const EmailLayout = ({ children }: EmailLayoutProps) => <AdminLayout>{children}</AdminLayout>;
-
-export default EmailLayout;
+export default function EmailLayout({ children }: EmailLayoutProps) {
+  return <AdminLayout>{children}</AdminLayout>;
+}
@@ -1,3 +1,4 @@
+"use client";
 import { FC } from "react";
 import { useForm } from "react-hook-form";
 import { IFormattedInstanceConfiguration, TInstanceImageConfigurationKeys } from "@plane/types";
@@ -10,6 +10,6 @@ export const metadata: Metadata = {
   title: "Images Settings - God Mode",
 };
 
-const ImageLayout = ({ children }: ImageLayoutProps) => <AdminLayout>{children}</AdminLayout>;
-
-export default ImageLayout;
+export default function ImageLayout({ children }: ImageLayoutProps) {
+  return <AdminLayout>{children}</AdminLayout>;
+}
@@ -16,7 +16,7 @@ import { UserProvider } from "@/lib/user-provider";
 // styles
 import "./globals.css";
 
-function RootLayout({ children }: { children: ReactNode }) {
+export default function RootLayout({ children }: { children: ReactNode }) {
   // themes
   const { resolvedTheme } = useTheme();
 
@@ -44,5 +44,3 @@ function RootLayout({ children }: { children: ReactNode }) {
     </html>
   );
 }
-
-export default RootLayout;
@@ -5,9 +5,11 @@ import { observer } from "mobx-react-lite";
 import Link from "next/link";
 import { ExternalLink, FileText, HelpCircle, MoveLeft } from "lucide-react";
+import { Transition } from "@headlessui/react";
 // ui
 import { DiscordIcon, GithubIcon, Tooltip } from "@plane/ui";
 // helpers
-import { WEB_BASE_URL } from "@/helpers/common.helper";
+import { WEB_BASE_URL, cn } from "@/helpers/common.helper";
+// hooks
 import { useTheme } from "@/hooks/store";
 // assets
 import packageJson from "package.json";
@@ -42,9 +44,12 @@ export const HelpSection: FC = observer(() => {
 
   return (
     <div
-      className={`flex w-full items-center justify-between gap-1 self-baseline border-t border-custom-sidebar-border-200 bg-custom-sidebar-background-100 px-4 py-2 ${
-        isSidebarCollapsed ? "flex-col" : ""
-      }`}
+      className={cn(
+        "flex w-full items-center justify-between gap-1 self-baseline border-t border-custom-border-200 bg-custom-sidebar-background-100 px-4 h-14 flex-shrink-0",
+        {
+          "flex-col h-auto py-1.5": isSidebarCollapsed,
+        }
+      )}
     >
       <div className={`flex items-center gap-1 ${isSidebarCollapsed ? "flex-col justify-center" : "w-full"}`}>
         <Tooltip tooltipContent="Redirect to plane" position="right" className="ml-4" disabled={!isSidebarCollapsed}>
@@ -1,3 +1,5 @@
+"use client";
+
 import Link from "next/link";
 import { Tooltip } from "@plane/ui";
 
@@ -1,3 +1,5 @@
+"use client";
+
 import React from "react";
 import Link from "next/link";
 // headless ui
@@ -43,33 +45,22 @@ export const ConfirmDiscardModal: React.FC<Props> = (props) => {
         <div className="px-4 pb-4 pt-5 sm:p-6 sm:pb-4">
           <div className="sm:flex sm:items-start">
             <div className="mt-3 text-center sm:mt-0 sm:text-left">
-              <Dialog.Title
-                as="h3"
-                className="text-lg font-medium leading-6 text-custom-text-300"
-              >
+              <Dialog.Title as="h3" className="text-lg font-medium leading-6 text-custom-text-300">
                 You have unsaved changes
               </Dialog.Title>
               <div className="mt-2">
                 <p className="text-sm text-custom-text-400">
-                  Changes you made will be lost if you go back. Do you
-                  wish to go back?
+                  Changes you made will be lost if you go back. Do you wish to go back?
                 </p>
               </div>
             </div>
           </div>
         </div>
         <div className="flex justify-end items-center p-4 sm:px-6 gap-2">
-          <Button
-            variant="neutral-primary"
-            size="sm"
-            onClick={handleClose}
-          >
+          <Button variant="neutral-primary" size="sm" onClick={handleClose}>
             Keep editing
           </Button>
-          <Link
-            href={onDiscardHref}
-            className={getButtonStyling("primary", "sm")}
-          >
+          <Link href={onDiscardHref} className={getButtonStyling("primary", "sm")}>
             Go back
           </Link>
         </div>
@@ -1,3 +1,5 @@
+"use client";
+
 import React from "react";
 import Image from "next/image";
 import { Button } from "@plane/ui";
@@ -1,3 +1,5 @@
+"use client";
+
 import { useTheme } from "next-themes";
 // ui
 import { Toast as ToastComponent } from "@plane/ui";
@@ -1,6 +1,6 @@
 {
   "name": "admin",
-  "version": "0.20.0",
+  "version": "0.21.0",
   "private": true,
   "scripts": {
     "dev": "turbo run develop",
@@ -14,7 +14,6 @@
     "@headlessui/react": "^1.7.19",
     "@plane/types": "*",
     "@plane/ui": "*",
-    "@plane/constants": "*",
     "@tailwindcss/typography": "^0.5.9",
     "@types/lodash": "^4.17.0",
     "autoprefixer": "10.4.14",
@@ -1,4 +1,4 @@
 {
   "name": "plane-api",
-  "version": "0.20.0"
+  "version": "0.21.0"
 }
@@ -784,6 +784,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
 
     def post(self, request, slug, project_id, cycle_id):
         new_cycle_id = request.data.get("new_cycle_id", False)
+        plot_type = request.GET.get("plot_type", "issues")
 
         if not new_cycle_id:
             return Response(
@@ -865,6 +866,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
             queryset=old_cycle.first(),
             slug=slug,
             project_id=project_id,
+            plot_type=plot_type,
             cycle_id=cycle_id,
         )
@@ -182,7 +182,6 @@ class IssueAPIEndpoint(BaseAPIView):
         issue_queryset = (
             self.get_queryset()
             .annotate(cycle_id=F("issue_cycle__cycle_id"))
-            .annotate(module_id=F("issue_module__module_id"))
             .annotate(
                 link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                 .order_by()
@@ -22,7 +22,7 @@ from plane.db.models import (
     IssueProperty,
     Module,
     Project,
-    ProjectDeployBoard,
+    DeployBoard,
     ProjectMember,
     State,
     Workspace,
@@ -99,7 +99,7 @@ class ProjectAPIEndpoint(BaseAPIView):
             )
             .annotate(
                 is_deployed=Exists(
-                    ProjectDeployBoard.objects.filter(
+                    DeployBoard.objects.filter(
                         project_id=OuterRef("pk"),
                         workspace__slug=self.kwargs.get("slug"),
                     )
@@ -30,7 +30,7 @@ from .project import (
     ProjectIdentifierSerializer,
     ProjectLiteSerializer,
     ProjectMemberLiteSerializer,
-    ProjectDeployBoardSerializer,
+    DeployBoardSerializer,
     ProjectMemberAdminSerializer,
     ProjectPublicMemberSerializer,
     ProjectMemberRoleSerializer,
@@ -66,6 +66,7 @@ class CycleSerializer(BaseSerializer):
             "external_source",
             "external_id",
             "progress_snapshot",
+            "logo_props",
             # meta fields
             "is_favorite",
             "total_issues",
@@ -2,19 +2,11 @@
 from .base import BaseSerializer
 
 from plane.db.models import Estimate, EstimatePoint
-from plane.app.serializers import (
-    WorkspaceLiteSerializer,
-    ProjectLiteSerializer,
-)
 
 from rest_framework import serializers
 
 
 class EstimateSerializer(BaseSerializer):
-    workspace_detail = WorkspaceLiteSerializer(
-        read_only=True, source="workspace"
-    )
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
 
     class Meta:
         model = Estimate
@@ -48,10 +40,6 @@ class EstimatePointSerializer(BaseSerializer):
 
 class EstimateReadSerializer(BaseSerializer):
     points = EstimatePointSerializer(read_only=True, many=True)
-    workspace_detail = WorkspaceLiteSerializer(
-        read_only=True, source="workspace"
-    )
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
 
     class Meta:
         model = Estimate
@@ -177,6 +177,8 @@ class ModuleSerializer(DynamicBaseSerializer):
     started_issues = serializers.IntegerField(read_only=True)
     unstarted_issues = serializers.IntegerField(read_only=True)
     backlog_issues = serializers.IntegerField(read_only=True)
+    total_estimate_points = serializers.IntegerField(read_only=True)
+    completed_estimate_points = serializers.IntegerField(read_only=True)
 
     class Meta:
         model = Module
@@ -199,7 +201,10 @@ class ModuleSerializer(DynamicBaseSerializer):
             "sort_order",
             "external_source",
             "external_id",
+            "logo_props",
             # computed fields
+            "total_estimate_points",
+            "completed_estimate_points",
             "is_favorite",
             "total_issues",
             "cancelled_issues",
@@ -39,6 +39,7 @@ class PageSerializer(BaseSerializer):
             "created_by",
             "updated_by",
             "view_props",
+            "logo_props",
         ]
         read_only_fields = [
             "workspace",
@@ -13,7 +13,7 @@ from plane.db.models import (
     ProjectMember,
     ProjectMemberInvite,
     ProjectIdentifier,
-    ProjectDeployBoard,
+    DeployBoard,
     ProjectPublicMember,
 )
 
@@ -114,7 +114,7 @@ class ProjectListSerializer(DynamicBaseSerializer):
     is_member = serializers.BooleanField(read_only=True)
     sort_order = serializers.FloatField(read_only=True)
     member_role = serializers.IntegerField(read_only=True)
-    is_deployed = serializers.BooleanField(read_only=True)
+    anchor = serializers.CharField(read_only=True)
     members = serializers.SerializerMethodField()
 
     def get_members(self, obj):
@@ -148,7 +148,7 @@ class ProjectDetailSerializer(BaseSerializer):
     is_member = serializers.BooleanField(read_only=True)
     sort_order = serializers.FloatField(read_only=True)
     member_role = serializers.IntegerField(read_only=True)
-    is_deployed = serializers.BooleanField(read_only=True)
+    anchor = serializers.CharField(read_only=True)
 
     class Meta:
         model = Project
@@ -206,14 +206,14 @@ class ProjectMemberLiteSerializer(BaseSerializer):
         read_only_fields = fields
 
 
-class ProjectDeployBoardSerializer(BaseSerializer):
+class DeployBoardSerializer(BaseSerializer):
     project_details = ProjectLiteSerializer(read_only=True, source="project")
     workspace_detail = WorkspaceLiteSerializer(
         read_only=True, source="workspace"
     )
 
     class Meta:
-        model = ProjectDeployBoard
+        model = DeployBoard
         fields = "__all__"
         read_only_fields = [
             "workspace",
@@ -4,6 +4,7 @@ from django.urls import path
 from plane.app.views import (
     ProjectEstimatePointEndpoint,
     BulkEstimatePointEndpoint,
+    EstimatePointEndpoint,
 )
 
 
@@ -34,4 +35,23 @@ urlpatterns = [
         ),
         name="bulk-create-estimate-points",
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/estimates/<uuid:estimate_id>/estimate-points/",
+        EstimatePointEndpoint.as_view(
+            {
+                "post": "create",
+            }
+        ),
+        name="estimate-points",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/estimates/<uuid:estimate_id>/estimate-points/<estimate_point_id>/",
+        EstimatePointEndpoint.as_view(
+            {
+                "patch": "partial_update",
+                "delete": "destroy",
+            }
+        ),
+        name="estimate-points",
+    ),
 ]
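Together with the view changes further down, these routes give estimate points their own create/update/delete surface under an estimate. A hedged usage sketch (host, workspace slug, IDs, and auth scheme are all illustrative; the key/value requirement comes from the create view shown later):

```python
import requests

base = "https://plane.example.com/api"  # hypothetical host
project_id = "11111111-1111-1111-1111-111111111111"  # illustrative UUIDs
estimate_id = "22222222-2222-2222-2222-222222222222"

# Create an estimate point; the view rejects requests missing key or value.
resp = requests.post(
    f"{base}/workspaces/acme/projects/{project_id}/estimates/{estimate_id}/estimate-points/",
    json={"key": 3, "value": "8"},
    headers={"Authorization": "Bearer <token>"},  # auth scheme assumed
)
print(resp.status_code)
```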
@@ -19,6 +19,8 @@ from plane.app.views import (
     IssueUserDisplayPropertyEndpoint,
     IssueViewSet,
     LabelViewSet,
+    BulkIssueOperationsEndpoint,
+    BulkArchiveIssuesEndpoint,
 )
 
 urlpatterns = [
@@ -81,6 +83,11 @@ urlpatterns = [
         BulkDeleteIssuesEndpoint.as_view(),
         name="project-issues-bulk",
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-archive-issues/",
+        BulkArchiveIssuesEndpoint.as_view(),
+        name="bulk-archive-issues",
+    ),
     ##
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/sub-issues/",
@@ -298,4 +305,9 @@ urlpatterns = [
         ),
         name="project-issue-draft",
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-operation-issues/",
+        BulkIssueOperationsEndpoint.as_view(),
+        name="bulk-operations-issues",
+    ),
 ]
@@ -2,6 +2,7 @@ from django.urls import path
 
 from plane.app.views import (
     ProjectViewSet,
+    DeployBoardViewSet,
     ProjectInvitationsViewset,
     ProjectMemberViewSet,
     ProjectMemberUserEndpoint,
@@ -12,7 +13,6 @@ from plane.app.views import (
     ProjectFavoritesViewSet,
     UserProjectInvitationsViewset,
     ProjectPublicCoverImagesEndpoint,
-    ProjectDeployBoardViewSet,
     UserProjectRolesEndpoint,
     ProjectArchiveUnarchiveEndpoint,
 )
@@ -157,7 +157,7 @@ urlpatterns = [
     ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/",
-        ProjectDeployBoardViewSet.as_view(
+        DeployBoardViewSet.as_view(
             {
                 "get": "list",
                 "post": "create",
@@ -167,7 +167,7 @@ urlpatterns = [
     ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/<uuid:pk>/",
-        ProjectDeployBoardViewSet.as_view(
+        DeployBoardViewSet.as_view(
             {
                 "get": "retrieve",
                 "patch": "partial_update",
@@ -4,7 +4,7 @@ from .project.base import (
     ProjectUserViewsEndpoint,
     ProjectFavoritesViewSet,
     ProjectPublicCoverImagesEndpoint,
-    ProjectDeployBoardViewSet,
+    DeployBoardViewSet,
     ProjectArchiveUnarchiveEndpoint,
 )
 
@@ -113,9 +113,7 @@ from .issue.activity import (
     IssueActivityEndpoint,
 )
 
-from .issue.archive import (
-    IssueArchiveViewSet,
-)
+from .issue.archive import IssueArchiveViewSet, BulkArchiveIssuesEndpoint
 
 from .issue.attachment import (
     IssueAttachmentEndpoint,
@@ -154,6 +152,8 @@ from .issue.subscriber import (
 )
 
 
+from .issue.bulk_operations import BulkIssueOperationsEndpoint
+
 from .module.base import (
     ModuleViewSet,
     ModuleLinkViewSet,
@@ -190,6 +190,7 @@ from .external.base import (
 from .estimate.base import (
     ProjectEstimatePointEndpoint,
     BulkEstimatePointEndpoint,
+    EstimatePointEndpoint,
 )
 
 from .inbox.base import InboxViewSet, InboxIssueViewSet
@@ -33,7 +33,7 @@ class AnalyticsEndpoint(BaseAPIView):
             "state__group",
             "labels__id",
             "assignees__id",
-            "estimate_point",
+            "estimate_point__value",
             "issue_cycle__cycle_id",
             "issue_module__module_id",
             "priority",
@@ -381,9 +381,9 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
         )
 
         open_estimate_sum = open_issues_queryset.aggregate(
-            sum=Sum("estimate_point")
+            sum=Sum("point")
         )["sum"]
-        total_estimate_sum = base_issues.aggregate(sum=Sum("estimate_point"))[
+        total_estimate_sum = base_issues.aggregate(sum=Sum("point"))[
             "sum"
         ]
 
@@ -1,4 +1,6 @@
 # Python imports
+import traceback
+
 import zoneinfo
 from django.conf import settings
 from django.core.exceptions import ObjectDoesNotExist, ValidationError
@@ -76,7 +78,11 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
             response = super().handle_exception(exc)
             return response
         except Exception as e:
-            print(e) if settings.DEBUG else print("Server Error")
+            (
+                print(e, traceback.format_exc())
+                if settings.DEBUG
+                else print("Server Error")
+            )
             if isinstance(e, IntegrityError):
                 return Response(
                     {"error": "The payload is not valid"},
|
||||
)
|
||||
|
||||
def get(self, request, slug, project_id, pk=None):
|
||||
plot_type = request.GET.get("plot_type", "issues")
|
||||
if pk is None:
|
||||
queryset = (
|
||||
self.get_queryset()
|
||||
@@ -375,6 +376,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
|
||||
queryset=queryset,
|
||||
slug=slug,
|
||||
project_id=project_id,
|
||||
plot_type=plot_type,
|
||||
cycle_id=pk,
|
||||
)
|
||||
|
||||
|
||||
@@ -17,8 +17,11 @@ from django.db.models import (
     UUIDField,
     Value,
     When,
+    Subquery,
+    Sum,
+    IntegerField,
 )
-from django.db.models.functions import Coalesce
+from django.db.models.functions import Coalesce, Cast
 from django.utils import timezone
 from django.core.serializers.json import DjangoJSONEncoder
 
@@ -73,6 +76,89 @@ class CycleViewSet(BaseViewSet):
             project_id=self.kwargs.get("project_id"),
             workspace__slug=self.kwargs.get("slug"),
         )
+        backlog_estimate_point = (
+            Issue.issue_objects.filter(
+                estimate_point__estimate__type="points",
+                state__group="backlog",
+                issue_cycle__cycle_id=OuterRef("pk"),
+            )
+            .values("issue_cycle__cycle_id")
+            .annotate(
+                backlog_estimate_point=Sum(
+                    Cast("estimate_point__value", IntegerField())
+                )
+            )
+            .values("backlog_estimate_point")[:1]
+        )
+        unstarted_estimate_point = (
+            Issue.issue_objects.filter(
+                estimate_point__estimate__type="points",
+                state__group="unstarted",
+                issue_cycle__cycle_id=OuterRef("pk"),
+            )
+            .values("issue_cycle__cycle_id")
+            .annotate(
+                unstarted_estimate_point=Sum(
+                    Cast("estimate_point__value", IntegerField())
+                )
+            )
+            .values("unstarted_estimate_point")[:1]
+        )
+        started_estimate_point = (
+            Issue.issue_objects.filter(
+                estimate_point__estimate__type="points",
+                state__group="started",
+                issue_cycle__cycle_id=OuterRef("pk"),
+            )
+            .values("issue_cycle__cycle_id")
+            .annotate(
+                started_estimate_point=Sum(
+                    Cast("estimate_point__value", IntegerField())
+                )
+            )
+            .values("started_estimate_point")[:1]
+        )
+        cancelled_estimate_point = (
+            Issue.issue_objects.filter(
+                estimate_point__estimate__type="points",
+                state__group="cancelled",
+                issue_cycle__cycle_id=OuterRef("pk"),
+            )
+            .values("issue_cycle__cycle_id")
+            .annotate(
+                cancelled_estimate_point=Sum(
+                    Cast("estimate_point__value", IntegerField())
+                )
+            )
+            .values("cancelled_estimate_point")[:1]
+        )
+        completed_estimate_point = (
+            Issue.issue_objects.filter(
+                estimate_point__estimate__type="points",
+                state__group="completed",
+                issue_cycle__cycle_id=OuterRef("pk"),
+            )
+            .values("issue_cycle__cycle_id")
+            .annotate(
+                completed_estimate_points=Sum(
+                    Cast("estimate_point__value", IntegerField())
+                )
+            )
+            .values("completed_estimate_points")[:1]
+        )
+        total_estimate_point = (
+            Issue.issue_objects.filter(
+                estimate_point__estimate__type="points",
+                issue_cycle__cycle_id=OuterRef("pk"),
+            )
+            .values("issue_cycle__cycle_id")
+            .annotate(
+                total_estimate_points=Sum(
+                    Cast("estimate_point__value", IntegerField())
+                )
+            )
+            .values("total_estimate_points")[:1]
+        )
         return self.filter_queryset(
             super()
             .get_queryset()
@@ -197,12 +283,49 @@
                     Value([], output_field=ArrayField(UUIDField())),
                 )
             )
+            .annotate(
+                backlog_estimate_points=Coalesce(
+                    Subquery(backlog_estimate_point),
+                    Value(0, output_field=IntegerField()),
+                ),
+            )
+            .annotate(
+                unstarted_estimate_points=Coalesce(
+                    Subquery(unstarted_estimate_point),
+                    Value(0, output_field=IntegerField()),
+                ),
+            )
+            .annotate(
+                started_estimate_points=Coalesce(
+                    Subquery(started_estimate_point),
+                    Value(0, output_field=IntegerField()),
+                ),
+            )
+            .annotate(
+                cancelled_estimate_points=Coalesce(
+                    Subquery(cancelled_estimate_point),
+                    Value(0, output_field=IntegerField()),
+                ),
+            )
+            .annotate(
+                completed_estimate_points=Coalesce(
+                    Subquery(completed_estimate_point),
+                    Value(0, output_field=IntegerField()),
+                ),
+            )
+            .annotate(
+                total_estimate_points=Coalesce(
+                    Subquery(total_estimate_point),
+                    Value(0, output_field=IntegerField()),
+                ),
+            )
             .order_by("-is_favorite", "name")
             .distinct()
         )
 
     def list(self, request, slug, project_id):
         queryset = self.get_queryset().filter(archived_at__isnull=True)
+        plot_type = request.GET.get("plot_type", "issues")
         cycle_view = request.GET.get("cycle_view", "all")
 
         # Update the order by
@@ -231,7 +354,14 @@
             "external_source",
             "external_id",
             "progress_snapshot",
+            "logo_props",
             # meta fields
+            "backlog_estimate_points",
+            "unstarted_estimate_points",
+            "started_estimate_points",
+            "cancelled_estimate_points",
+            "completed_estimate_points",
+            "total_estimate_points",
             "is_favorite",
             "total_issues",
             "cancelled_issues",
@@ -334,6 +464,7 @@
                     queryset=queryset.first(),
                     slug=slug,
                     project_id=project_id,
+                    plot_type=plot_type,
                     cycle_id=data[0]["id"],
                 )
             )
@@ -356,7 +487,10 @@
             "external_source",
             "external_id",
             "progress_snapshot",
+            "logo_props",
             # meta fields
+            "completed_estimate_points",
+            "total_estimate_points",
             "is_favorite",
             "total_issues",
             "cancelled_issues",
@@ -403,6 +537,7 @@
             "external_source",
             "external_id",
             "progress_snapshot",
+            "logo_props",
             # meta fields
             "is_favorite",
             "cancelled_issues",
@@ -496,6 +631,7 @@
             "external_source",
             "external_id",
             "progress_snapshot",
+            "logo_props",
             # meta fields
             "is_favorite",
             "total_issues",
@@ -523,6 +659,7 @@
             return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
 
     def retrieve(self, request, slug, project_id, pk):
+        plot_type = request.GET.get("plot_type", "issues")
         queryset = (
             self.get_queryset().filter(archived_at__isnull=True).filter(pk=pk)
         )
@@ -556,6 +693,7 @@
             "external_id",
             "progress_snapshot",
             "sub_issues",
+            "logo_props",
             # meta fields
             "is_favorite",
             "total_issues",
@@ -677,6 +815,7 @@
             queryset=queryset,
             slug=slug,
             project_id=project_id,
+            plot_type=plot_type,
             cycle_id=pk,
         )
@@ -793,6 +932,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
 
     def post(self, request, slug, project_id, cycle_id):
         new_cycle_id = request.data.get("new_cycle_id", False)
+        plot_type = request.GET.get("plot_type", "issues")
 
         if not new_cycle_id:
             return Response(
@@ -874,6 +1014,7 @@
             queryset=old_cycle.first(),
             slug=slug,
             project_id=project_id,
+            plot_type=plot_type,
             cycle_id=cycle_id,
         )
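The six subqueries added above differ only in the `state__group` filter and the alias they aggregate under. A condensed sketch of the repeated shape as a single helper (a hypothetical refactor; the diff deliberately spells each block out, and `Issue` is assumed to be the model already imported by this module):

```python
from django.db.models import IntegerField, OuterRef, Sum
from django.db.models.functions import Cast

def estimate_points_subquery(state_group=None):
    # Sum the integer-cast estimate point values of a cycle's issues,
    # optionally restricted to one state group, correlated on the cycle pk.
    qs = Issue.issue_objects.filter(
        estimate_point__estimate__type="points",
        issue_cycle__cycle_id=OuterRef("pk"),
    )
    if state_group:
        qs = qs.filter(state__group=state_group)
    return (
        qs.values("issue_cycle__cycle_id")  # group by cycle
        .annotate(points=Sum(Cast("estimate_point__value", IntegerField())))
        .values("points")[:1]  # shape usable inside Subquery(...)
    )
```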
@@ -2,43 +2,50 @@
 import json
 
 # Django imports
-from django.db.models import (
-    Func,
-    F,
-    Q,
-    OuterRef,
-    Value,
-    UUIDField,
-)
 from django.core import serializers
+from django.db.models import (
+    F,
+    Func,
+    OuterRef,
+    Q,
+)
 from django.utils import timezone
 from django.utils.decorators import method_decorator
 from django.views.decorators.gzip import gzip_page
-from django.contrib.postgres.aggregates import ArrayAgg
-from django.contrib.postgres.fields import ArrayField
-from django.db.models.functions import Coalesce
 
 # Third party imports
-from rest_framework.response import Response
 from rest_framework import status
+from rest_framework.response import Response
 
-from plane.app.permissions import (
-    ProjectEntityPermission,
-)
 from plane.app.serializers import (
     IssueSerializer,
     CycleIssueSerializer,
 )
+from plane.app.permissions import ProjectEntityPermission
+from plane.bgtasks.issue_activites_task import issue_activity
 from plane.db.models import (
     Cycle,
     CycleIssue,
     Issue,
-    IssueLink,
     IssueAttachment,
+    IssueLink,
 )
+from plane.utils.grouper import (
+    issue_group_values,
+    issue_on_results,
+    issue_queryset_grouper,
+)
-from plane.bgtasks.issue_activites_task import issue_activity
 from plane.utils.issue_filters import issue_filters
-from plane.utils.user_timezone_converter import user_timezone_converter
+from plane.utils.order_queryset import order_issue_queryset
+from plane.utils.paginator import (
+    GroupedOffsetPaginator,
+    SubGroupedOffsetPaginator,
+)
 
 # Module imports
 from .. import BaseViewSet
 
 class CycleIssueViewSet(BaseViewSet):
     serializer_class = CycleIssueSerializer
@@ -86,14 +93,9 @@ class CycleIssueViewSet(BaseViewSet):
 
     @method_decorator(gzip_page)
     def list(self, request, slug, project_id, cycle_id):
-        fields = [
-            field
-            for field in request.GET.get("fields", "").split(",")
-            if field
-        ]
-        order_by = request.GET.get("order_by", "created_at")
+        order_by_param = request.GET.get("order_by", "created_at")
         filters = issue_filters(request.query_params, "GET")
-        queryset = (
+        issue_queryset = (
             Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
             .filter(project_id=project_id)
             .filter(workspace__slug=slug)
@@ -105,7 +107,6 @@ class CycleIssueViewSet(BaseViewSet):
                 "issue_module__module",
                 "issue_cycle__cycle",
             )
-            .order_by(order_by)
             .filter(**filters)
             .annotate(cycle_id=F("issue_cycle__cycle_id"))
             .annotate(
@@ -130,73 +131,112 @@ class CycleIssueViewSet(BaseViewSet):
                 .annotate(count=Func(F("id"), function="Count"))
                 .values("count")
             )
-            .annotate(
-                label_ids=Coalesce(
-                    ArrayAgg(
-                        "labels__id",
-                        distinct=True,
-                        filter=~Q(labels__id__isnull=True),
-                    ),
-                    Value([], output_field=ArrayField(UUIDField())),
-                ),
-                assignee_ids=Coalesce(
-                    ArrayAgg(
-                        "assignees__id",
-                        distinct=True,
-                        filter=~Q(assignees__id__isnull=True)
-                        & Q(assignees__member_project__is_active=True),
-                    ),
-                    Value([], output_field=ArrayField(UUIDField())),
-                ),
-                module_ids=Coalesce(
-                    ArrayAgg(
-                        "issue_module__module_id",
-                        distinct=True,
-                        filter=~Q(issue_module__module_id__isnull=True),
-                    ),
-                    Value([], output_field=ArrayField(UUIDField())),
-                ),
-            )
-            .order_by(order_by)
         )
-        if self.fields:
-            issues = IssueSerializer(
-                queryset, many=True, fields=fields if fields else None
-            ).data
-        else:
-            issues = queryset.values(
-                "id",
-                "name",
-                "state_id",
-                "sort_order",
-                "completed_at",
-                "estimate_point",
-                "priority",
-                "start_date",
-                "target_date",
-                "sequence_id",
-                "project_id",
-                "parent_id",
-                "cycle_id",
-                "module_ids",
-                "label_ids",
-                "assignee_ids",
-                "sub_issues_count",
-                "created_at",
-                "updated_at",
-                "created_by",
-                "updated_by",
-                "attachment_count",
-                "link_count",
-                "is_draft",
-                "archived_at",
-            )
-            datetime_fields = ["created_at", "updated_at"]
-            issues = user_timezone_converter(
-                issues, datetime_fields, request.user.user_timezone
-            )
+        filters = issue_filters(request.query_params, "GET")
 
-        return Response(issues, status=status.HTTP_200_OK)
+        order_by_param = request.GET.get("order_by", "-created_at")
+        issue_queryset = issue_queryset.filter(**filters)
+        # Issue queryset
+        issue_queryset, order_by_param = order_issue_queryset(
+            issue_queryset=issue_queryset,
+            order_by_param=order_by_param,
+        )
+
+        # Group by
+        group_by = request.GET.get("group_by", False)
+        sub_group_by = request.GET.get("sub_group_by", False)
+
+        # issue queryset
+        issue_queryset = issue_queryset_grouper(
+            queryset=issue_queryset,
+            group_by=group_by,
+            sub_group_by=sub_group_by,
+        )
+
+        if group_by:
+            # Check group and sub group value paginate
+            if sub_group_by:
+                if group_by == sub_group_by:
+                    return Response(
+                        {
+                            "error": "Group by and sub group by cannot have same parameters"
+                        },
+                        status=status.HTTP_400_BAD_REQUEST,
+                    )
+                else:
+                    # group and sub group pagination
+                    return self.paginate(
+                        request=request,
+                        order_by=order_by_param,
+                        queryset=issue_queryset,
+                        on_results=lambda issues: issue_on_results(
+                            group_by=group_by,
+                            issues=issues,
+                            sub_group_by=sub_group_by,
+                        ),
+                        paginator_cls=SubGroupedOffsetPaginator,
+                        group_by_fields=issue_group_values(
+                            field=group_by,
+                            slug=slug,
+                            project_id=project_id,
+                            filters=filters,
+                        ),
+                        sub_group_by_fields=issue_group_values(
+                            field=sub_group_by,
+                            slug=slug,
+                            project_id=project_id,
+                            filters=filters,
+                        ),
+                        group_by_field_name=group_by,
+                        sub_group_by_field_name=sub_group_by,
+                        count_filter=Q(
+                            Q(issue_inbox__status=1)
+                            | Q(issue_inbox__status=-1)
+                            | Q(issue_inbox__status=2)
+                            | Q(issue_inbox__isnull=True),
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
+                    )
+            # Group Paginate
+            else:
+                # Group paginate
+                return self.paginate(
+                    request=request,
+                    order_by=order_by_param,
+                    queryset=issue_queryset,
+                    on_results=lambda issues: issue_on_results(
+                        group_by=group_by,
+                        issues=issues,
+                        sub_group_by=sub_group_by,
+                    ),
+                    paginator_cls=GroupedOffsetPaginator,
+                    group_by_fields=issue_group_values(
+                        field=group_by,
+                        slug=slug,
+                        project_id=project_id,
+                        filters=filters,
+                    ),
+                    group_by_field_name=group_by,
+                    count_filter=Q(
+                        Q(issue_inbox__status=1)
+                        | Q(issue_inbox__status=-1)
+                        | Q(issue_inbox__status=2)
+                        | Q(issue_inbox__isnull=True),
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
+                )
+        else:
+            # List Paginate
+            return self.paginate(
+                order_by=order_by_param,
+                request=request,
+                queryset=issue_queryset,
+                on_results=lambda issues: issue_on_results(
+                    group_by=group_by, issues=issues, sub_group_by=sub_group_by
+                ),
+            )
 
     def create(self, request, slug, project_id, cycle_id):
         issues = request.data.get("issues", [])
|
||||
# Django imports
|
||||
from django.db.models import (
|
||||
Q,
|
||||
Case,
|
||||
When,
|
||||
Value,
|
||||
CharField,
|
||||
Count,
|
||||
F,
|
||||
Exists,
|
||||
OuterRef,
|
||||
Subquery,
|
||||
JSONField,
|
||||
Func,
|
||||
Prefetch,
|
||||
IntegerField,
|
||||
)
|
||||
from django.contrib.postgres.aggregates import ArrayAgg
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db.models import UUIDField
|
||||
from django.db.models import (
|
||||
Case,
|
||||
CharField,
|
||||
Count,
|
||||
Exists,
|
||||
F,
|
||||
Func,
|
||||
IntegerField,
|
||||
JSONField,
|
||||
OuterRef,
|
||||
Prefetch,
|
||||
Q,
|
||||
Subquery,
|
||||
UUIDField,
|
||||
Value,
|
||||
When,
|
||||
)
|
||||
from django.db.models.functions import Coalesce
|
||||
from django.utils import timezone
|
||||
from rest_framework import status
|
||||
|
||||
# Third Party imports
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
|
||||
from plane.app.serializers import (
|
||||
DashboardSerializer,
|
||||
IssueActivitySerializer,
|
||||
IssueSerializer,
|
||||
WidgetSerializer,
|
||||
)
|
||||
from plane.db.models import (
|
||||
Dashboard,
|
||||
DashboardWidget,
|
||||
Issue,
|
||||
IssueActivity,
|
||||
IssueAttachment,
|
||||
IssueLink,
|
||||
IssueRelation,
|
||||
Project,
|
||||
ProjectMember,
|
||||
User,
|
||||
Widget,
|
||||
)
|
||||
from plane.utils.issue_filters import issue_filters
|
||||
|
||||
# Module imports
|
||||
from .. import BaseAPIView
|
||||
from plane.db.models import (
|
||||
Issue,
|
||||
IssueActivity,
|
||||
ProjectMember,
|
||||
Widget,
|
||||
DashboardWidget,
|
||||
Dashboard,
|
||||
Project,
|
||||
IssueLink,
|
||||
IssueAttachment,
|
||||
IssueRelation,
|
||||
User,
|
||||
)
|
||||
from plane.app.serializers import (
|
||||
IssueActivitySerializer,
|
||||
IssueSerializer,
|
||||
DashboardSerializer,
|
||||
WidgetSerializer,
|
||||
)
|
||||
from plane.utils.issue_filters import issue_filters
|
||||
|
||||
|
||||
def dashboard_overview_stats(self, request, slug):
|
||||
@@ -569,6 +570,7 @@ def dashboard_recent_collaborators(self, request, slug):
|
||||
)
|
||||
|
||||
return self.paginate(
|
||||
order_by=request.GET.get("order_by", "-created_at"),
|
||||
request=request,
|
||||
queryset=project_members_with_activities,
|
||||
controller=lambda qs: self.get_results_controller(qs, slug),
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
import random
|
||||
import string
|
||||
|
||||
# Third party imports
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
@@ -5,7 +8,7 @@ from rest_framework import status
|
||||
# Module imports
|
||||
from ..base import BaseViewSet, BaseAPIView
|
||||
from plane.app.permissions import ProjectEntityPermission
|
||||
from plane.db.models import Project, Estimate, EstimatePoint
|
||||
from plane.db.models import Project, Estimate, EstimatePoint, Issue
|
||||
from plane.app.serializers import (
|
||||
EstimateSerializer,
|
||||
EstimatePointSerializer,
|
||||
@@ -13,6 +16,12 @@ from plane.app.serializers import (
|
||||
)
|
||||
from plane.utils.cache import invalidate_cache
|
||||
|
||||
|
||||
def generate_random_name(length=10):
|
||||
letters = string.ascii_lowercase
|
||||
return "".join(random.choice(letters) for i in range(length))
|
||||
|
||||
|
||||
class ProjectEstimatePointEndpoint(BaseAPIView):
|
||||
permission_classes = [
|
||||
ProjectEntityPermission,
|
||||
@@ -49,13 +58,17 @@ class BulkEstimatePointEndpoint(BaseViewSet):
|
||||
serializer = EstimateReadSerializer(estimates, many=True)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
@invalidate_cache(path="/api/workspaces/:slug/estimates/", url_params=True, user=False)
|
||||
@invalidate_cache(
|
||||
path="/api/workspaces/:slug/estimates/", url_params=True, user=False
|
||||
)
|
||||
def create(self, request, slug, project_id):
|
||||
if not request.data.get("estimate", False):
|
||||
return Response(
|
||||
{"error": "Estimate is required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
estimate = request.data.get('estimate')
|
||||
estimate_name = estimate.get("name", generate_random_name())
|
||||
estimate_type = estimate.get("type", 'categories')
|
||||
last_used = estimate.get("last_used", False)
|
||||
estimate = Estimate.objects.create(
|
||||
name=estimate_name, project_id=project_id, last_used=last_used, type=estimate_type
|
||||
)
|
||||
|
||||
estimate_points = request.data.get("estimate_points", [])
|
||||
|
||||
@@ -67,14 +80,6 @@ class BulkEstimatePointEndpoint(BaseViewSet):
|
||||
serializer.errors, status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
estimate_serializer = EstimateSerializer(
|
||||
data=request.data.get("estimate")
|
||||
)
|
||||
if not estimate_serializer.is_valid():
|
||||
return Response(
|
||||
estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
estimate = estimate_serializer.save(project_id=project_id)
|
||||
estimate_points = EstimatePoint.objects.bulk_create(
|
||||
[
|
||||
EstimatePoint(
|
||||
@@ -93,17 +98,8 @@ class BulkEstimatePointEndpoint(BaseViewSet):
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
|
||||
estimate_point_serializer = EstimatePointSerializer(
|
||||
estimate_points, many=True
|
||||
)
|
||||
|
||||
return Response(
|
||||
{
|
||||
"estimate": estimate_serializer.data,
|
||||
"estimate_points": estimate_point_serializer.data,
|
||||
},
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
serializer = EstimateReadSerializer(estimate)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
def retrieve(self, request, slug, project_id, estimate_id):
|
||||
estimate = Estimate.objects.get(
|
||||
@@ -115,13 +111,10 @@ class BulkEstimatePointEndpoint(BaseViewSet):
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
@invalidate_cache(path="/api/workspaces/:slug/estimates/", url_params=True, user=False)
|
||||
@invalidate_cache(
|
||||
path="/api/workspaces/:slug/estimates/", url_params=True, user=False
|
||||
)
|
||||
def partial_update(self, request, slug, project_id, estimate_id):
|
||||
if not request.data.get("estimate", False):
|
||||
return Response(
|
||||
{"error": "Estimate is required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
if not len(request.data.get("estimate_points", [])):
|
||||
return Response(
|
||||
@@ -131,15 +124,10 @@ class BulkEstimatePointEndpoint(BaseViewSet):
|
||||
|
||||
estimate = Estimate.objects.get(pk=estimate_id)
|
||||
|
||||
estimate_serializer = EstimateSerializer(
|
||||
estimate, data=request.data.get("estimate"), partial=True
|
||||
)
|
||||
if not estimate_serializer.is_valid():
|
||||
return Response(
|
||||
estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
estimate = estimate_serializer.save()
|
||||
if request.data.get("estimate"):
|
||||
estimate.name = request.data.get("estimate").get("name", estimate.name)
|
||||
estimate.type = request.data.get("estimate").get("type", estimate.type)
|
||||
estimate.save()
|
||||
|
||||
estimate_points_data = request.data.get("estimate_points", [])
|
||||
|
||||
@@ -165,29 +153,113 @@ class BulkEstimatePointEndpoint(BaseViewSet):
|
||||
estimate_point.value = estimate_point_data[0].get(
|
||||
"value", estimate_point.value
|
||||
)
|
||||
estimate_point.key = estimate_point_data[0].get(
|
||||
"key", estimate_point.key
|
||||
)
|
||||
updated_estimate_points.append(estimate_point)
|
||||
|
||||
EstimatePoint.objects.bulk_update(
|
||||
updated_estimate_points,
|
||||
["value"],
|
||||
["key", "value"],
|
||||
batch_size=10,
|
||||
)
|
||||
|
||||
estimate_point_serializer = EstimatePointSerializer(
|
||||
estimate_points, many=True
|
||||
)
|
||||
estimate_serializer = EstimateReadSerializer(estimate)
|
||||
return Response(
|
||||
{
|
||||
"estimate": estimate_serializer.data,
|
||||
"estimate_points": estimate_point_serializer.data,
|
||||
},
|
||||
estimate_serializer.data,
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
@invalidate_cache(path="/api/workspaces/:slug/estimates/", url_params=True, user=False)
|
||||
@invalidate_cache(
|
||||
path="/api/workspaces/:slug/estimates/", url_params=True, user=False
|
||||
)
|
||||
def destroy(self, request, slug, project_id, estimate_id):
|
||||
estimate = Estimate.objects.get(
|
||||
pk=estimate_id, workspace__slug=slug, project_id=project_id
|
||||
)
|
||||
estimate.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
class EstimatePointEndpoint(BaseViewSet):
    permission_classes = [
        ProjectEntityPermission,
    ]

    def create(self, request, slug, project_id, estimate_id):
        # TODO: add a key validation if the same key already exists
        if not request.data.get("key") or not request.data.get("value"):
            return Response(
                {"error": "Key and value are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        key = request.data.get("key", 0)
        value = request.data.get("value", "")
        estimate_point = EstimatePoint.objects.create(
            estimate_id=estimate_id,
            project_id=project_id,
            key=key,
            value=value,
        )
        serializer = EstimatePointSerializer(estimate_point).data
        return Response(serializer, status=status.HTTP_200_OK)

    def partial_update(self, request, slug, project_id, estimate_id, estimate_point_id):
        # TODO: add a key validation if the same key already exists
        estimate_point = EstimatePoint.objects.get(
            pk=estimate_point_id,
            estimate_id=estimate_id,
            project_id=project_id,
            workspace__slug=slug,
        )
        serializer = EstimatePointSerializer(
            estimate_point, data=request.data, partial=True
        )
        if not serializer.is_valid():
            return Response(
                serializer.errors, status=status.HTTP_400_BAD_REQUEST
            )
        serializer.save()
        return Response(serializer.data, status=status.HTTP_200_OK)

    def destroy(
        self, request, slug, project_id, estimate_id, estimate_point_id
    ):
        new_estimate_id = request.GET.get("new_estimate_id", None)
        estimate_points = EstimatePoint.objects.filter(
            estimate_id=estimate_id,
            project_id=project_id,
            workspace__slug=slug,
        )
        # update all the issues with the new estimate
        if new_estimate_id:
            _ = Issue.objects.filter(
                project_id=project_id,
                workspace__slug=slug,
                estimate_id=estimate_point_id,
            ).update(estimate_id=new_estimate_id)

        # delete the estimate point
        old_estimate_point = EstimatePoint.objects.filter(
            pk=estimate_point_id
        ).first()

        # rearrange the estimate points
        updated_estimate_points = []
        for estimate_point in estimate_points:
            if estimate_point.key > old_estimate_point.key:
                estimate_point.key -= 1
                updated_estimate_points.append(estimate_point)

        EstimatePoint.objects.bulk_update(
            updated_estimate_points,
            ["key"],
            batch_size=10,
        )

        old_estimate_point.delete()

        return Response(
            EstimatePointSerializer(updated_estimate_points, many=True).data,
            status=status.HTTP_200_OK,
        )

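The destroy flow above keeps the remaining point keys contiguous: every point that sat above the deleted key shifts down by one before the bulk update. A minimal standalone sketch of that re-keying step (plain dataclasses stand in for the Django model; the names here are illustrative, not part of the codebase):

from dataclasses import dataclass

@dataclass
class Point:
    key: int    # ordinal position of the point within its estimate
    value: str

def compact_keys(points, deleted_key):
    # Shift every point that sat above the deleted key down by one,
    # mirroring the loop feeding EstimatePoint.objects.bulk_update above.
    updated = []
    for point in points:
        if point.key > deleted_key:
            point.key -= 1
            updated.append(point)
    return updated

remaining = [Point(0, "1"), Point(1, "2"), Point(3, "5"), Point(4, "8")]
print(compact_keys(remaining, 2))  # keys 3 and 4 become 2 and 3
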
@@ -1,14 +1,14 @@
# Third Party imports
from rest_framework.response import Response
from rest_framework import status
from rest_framework.response import Response

from plane.app.permissions import WorkSpaceAdminPermission
from plane.app.serializers import ExporterHistorySerializer
from plane.bgtasks.export_task import issue_export_task
from plane.db.models import ExporterHistory, Project, Workspace

# Module imports
from .. import BaseAPIView
from plane.app.permissions import WorkSpaceAdminPermission
from plane.bgtasks.export_task import issue_export_task
from plane.db.models import Project, ExporterHistory, Workspace

from plane.app.serializers import ExporterHistorySerializer


class ExportIssuesEndpoint(BaseAPIView):
@@ -72,6 +72,7 @@ class ExportIssuesEndpoint(BaseAPIView):
            "cursor", False
        ):
            return self.paginate(
                order_by=request.GET.get("order_by", "-created_at"),
                request=request,
                queryset=exporter_history,
                on_results=lambda exporter_history: ExporterHistorySerializer(
@@ -2,52 +2,54 @@
import json

# Django imports
from django.utils import timezone
from django.db.models import (
    Prefetch,
    OuterRef,
    Func,
    F,
    Q,
    Case,
    Value,
    CharField,
    When,
    Exists,
    Max,
    UUIDField,
)
from django.core.serializers.json import DjangoJSONEncoder
from django.db.models import (
    F,
    Func,
    OuterRef,
    Q,
    Prefetch,
    Exists,
)
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models.functions import Coalesce

# Third Party imports
from rest_framework.response import Response
from rest_framework import status
from rest_framework.response import Response

# Module imports
from .. import BaseViewSet
from plane.app.serializers import (
    IssueSerializer,
    IssueFlatSerializer,
    IssueDetailSerializer,
)
from plane.app.permissions import (
    ProjectEntityPermission,
)
from plane.app.serializers import (
    IssueFlatSerializer,
    IssueSerializer,
    IssueDetailSerializer
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.db.models import (
    Issue,
    IssueLink,
    IssueAttachment,
    IssueLink,
    IssueSubscriber,
    IssueReaction,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.utils.grouper import (
    issue_group_values,
    issue_on_results,
    issue_queryset_grouper,
)
from plane.utils.issue_filters import issue_filters
from plane.utils.user_timezone_converter import user_timezone_converter
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import (
    GroupedOffsetPaginator,
    SubGroupedOffsetPaginator,
)

# Module imports
from .. import BaseViewSet, BaseAPIView


class IssueArchiveViewSet(BaseViewSet):
    permission_classes = [
@@ -92,33 +94,6 @@ class IssueArchiveViewSet(BaseViewSet):
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                label_ids=Coalesce(
                    ArrayAgg(
                        "labels__id",
                        distinct=True,
                        filter=~Q(labels__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                assignee_ids=Coalesce(
                    ArrayAgg(
                        "assignees__id",
                        distinct=True,
                        filter=~Q(assignees__id__isnull=True)
                        & Q(assignees__member_project__is_active=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                module_ids=Coalesce(
                    ArrayAgg(
                        "issue_module__module_id",
                        distinct=True,
                        filter=~Q(issue_module__module_id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            )
        )

    @method_decorator(gzip_page)
@@ -126,125 +101,116 @@ class IssueArchiveViewSet(BaseViewSet):
        filters = issue_filters(request.query_params, "GET")
        show_sub_issues = request.GET.get("show_sub_issues", "true")

        # Custom ordering for priority and state
        priority_order = ["urgent", "high", "medium", "low", "none"]
        state_order = [
            "backlog",
            "unstarted",
            "started",
            "completed",
            "cancelled",
        ]

        order_by_param = request.GET.get("order_by", "-created_at")

        issue_queryset = self.get_queryset().filter(**filters)

        # Priority Ordering
        if order_by_param == "priority" or order_by_param == "-priority":
            priority_order = (
                priority_order
                if order_by_param == "priority"
                else priority_order[::-1]
            )
            issue_queryset = issue_queryset.annotate(
                priority_order=Case(
                    *[
                        When(priority=p, then=Value(i))
                        for i, p in enumerate(priority_order)
                    ],
                    output_field=CharField(),
                )
            ).order_by("priority_order")

        # State Ordering
        elif order_by_param in [
            "state__name",
            "state__group",
            "-state__name",
            "-state__group",
        ]:
            state_order = (
                state_order
                if order_by_param in ["state__name", "state__group"]
                else state_order[::-1]
            )
            issue_queryset = issue_queryset.annotate(
                state_order=Case(
                    *[
                        When(state__group=state_group, then=Value(i))
                        for i, state_group in enumerate(state_order)
                    ],
                    default=Value(len(state_order)),
                    output_field=CharField(),
                )
            ).order_by("state_order")
        # assignee and label ordering
        elif order_by_param in [
            "labels__name",
            "-labels__name",
            "assignees__first_name",
            "-assignees__first_name",
        ]:
            issue_queryset = issue_queryset.annotate(
                max_values=Max(
                    order_by_param[1::]
                    if order_by_param.startswith("-")
                    else order_by_param
                )
            ).order_by(
                "-max_values"
                if order_by_param.startswith("-")
                else "max_values"
            )
        else:
            issue_queryset = issue_queryset.order_by(order_by_param)

        issue_queryset = (
            issue_queryset
            if show_sub_issues == "true"
            else issue_queryset.filter(parent__isnull=True)
        )
        if self.expand or self.fields:
            issues = IssueSerializer(
                issue_queryset,
                many=True,
                fields=self.fields,
            ).data
        else:
            issues = issue_queryset.values(
                "id",
                "name",
                "state_id",
                "sort_order",
                "completed_at",
                "estimate_point",
                "priority",
                "start_date",
                "target_date",
                "sequence_id",
                "project_id",
                "parent_id",
                "cycle_id",
                "module_ids",
                "label_ids",
                "assignee_ids",
                "sub_issues_count",
                "created_at",
                "updated_at",
                "created_by",
                "updated_by",
                "attachment_count",
                "link_count",
                "is_draft",
                "archived_at",
            )
            datetime_fields = ["created_at", "updated_at"]
            issues = user_timezone_converter(
                issues, datetime_fields, request.user.user_timezone
            )
        # Issue queryset
        issue_queryset, order_by_param = order_issue_queryset(
            issue_queryset=issue_queryset,
            order_by_param=order_by_param,
        )

        return Response(issues, status=status.HTTP_200_OK)
        # Group by
        group_by = request.GET.get("group_by", False)
        sub_group_by = request.GET.get("sub_group_by", False)

        # issue queryset
        issue_queryset = issue_queryset_grouper(
            queryset=issue_queryset,
            group_by=group_by,
            sub_group_by=sub_group_by,
        )

        if group_by:
            # Check group and sub group value paginate
            if sub_group_by:
                if group_by == sub_group_by:
                    return Response(
                        {
                            "error": "Group by and sub group by cannot have same parameters"
                        },
                        status=status.HTTP_400_BAD_REQUEST,
                    )
                else:
                    # group and sub group pagination
                    return self.paginate(
                        request=request,
                        order_by=order_by_param,
                        queryset=issue_queryset,
                        on_results=lambda issues: issue_on_results(
                            group_by=group_by,
                            issues=issues,
                            sub_group_by=sub_group_by,
                        ),
                        paginator_cls=SubGroupedOffsetPaginator,
                        group_by_fields=issue_group_values(
                            field=group_by,
                            slug=slug,
                            project_id=project_id,
                            filters=filters,
                        ),
                        sub_group_by_fields=issue_group_values(
                            field=sub_group_by,
                            slug=slug,
                            project_id=project_id,
                            filters=filters,
                        ),
                        group_by_field_name=group_by,
                        sub_group_by_field_name=sub_group_by,
                        count_filter=Q(
                            Q(issue_inbox__status=1)
                            | Q(issue_inbox__status=-1)
                            | Q(issue_inbox__status=2)
                            | Q(issue_inbox__isnull=True),
                            archived_at__isnull=True,
                            is_draft=False,
                        ),
                    )
            # Group Paginate
            else:
                # Group paginate
                return self.paginate(
                    request=request,
                    order_by=order_by_param,
                    queryset=issue_queryset,
                    on_results=lambda issues: issue_on_results(
                        group_by=group_by,
                        issues=issues,
                        sub_group_by=sub_group_by,
                    ),
                    paginator_cls=GroupedOffsetPaginator,
                    group_by_fields=issue_group_values(
                        field=group_by,
                        slug=slug,
                        project_id=project_id,
                        filters=filters,
                    ),
                    group_by_field_name=group_by,
                    count_filter=Q(
                        Q(issue_inbox__status=1)
                        | Q(issue_inbox__status=-1)
                        | Q(issue_inbox__status=2)
                        | Q(issue_inbox__isnull=True),
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                )
        else:
            # List Paginate
            return self.paginate(
                order_by=order_by_param,
                request=request,
                queryset=issue_queryset,
                on_results=lambda issues: issue_on_results(
                    group_by=group_by, issues=issues, sub_group_by=sub_group_by
                ),
            )

    def retrieve(self, request, slug, project_id, pk=None):
        issue = (
@@ -351,3 +317,58 @@ class IssueArchiveViewSet(BaseViewSet):
        issue.save()

        return Response(status=status.HTTP_204_NO_CONTENT)


class BulkArchiveIssuesEndpoint(BaseAPIView):
    permission_classes = [
        ProjectEntityPermission,
    ]

    def post(self, request, slug, project_id):
        issue_ids = request.data.get("issue_ids", [])

        if not len(issue_ids):
            return Response(
                {"error": "Issue IDs are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        issues = Issue.objects.filter(
            workspace__slug=slug, project_id=project_id, pk__in=issue_ids
        ).select_related("state")
        bulk_archive_issues = []
        for issue in issues:
            if issue.state.group not in ["completed", "cancelled"]:
                return Response(
                    {
                        "error_code": 4091,
                        "error_message": "INVALID_ARCHIVE_STATE_GROUP"
                    },
                    status=status.HTTP_400_BAD_REQUEST,
                )
            issue_activity.delay(
                type="issue.activity.updated",
                requested_data=json.dumps(
                    {
                        "archived_at": str(timezone.now().date()),
                        "automation": False,
                    }
                ),
                actor_id=str(request.user.id),
                issue_id=str(issue.id),
                project_id=str(project_id),
                current_instance=json.dumps(
                    IssueSerializer(issue).data, cls=DjangoJSONEncoder
                ),
                epoch=int(timezone.now().timestamp()),
                notification=True,
                origin=request.META.get("HTTP_ORIGIN"),
            )
            issue.archived_at = timezone.now().date()
            bulk_archive_issues.append(issue)
        Issue.objects.bulk_update(bulk_archive_issues, ["archived_at"])

        return Response(
            {"archived_at": str(timezone.now().date())},
            status=status.HTTP_200_OK,
        )

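For context, a hypothetical client call against this endpoint (the URL path is assumed from the workspace/project routing used elsewhere in the app; it is not part of this diff). Only issues whose state group is completed or cancelled are accepted, otherwise the 4091 error above comes back:

import requests  # client-side dependency, for illustration only

resp = requests.post(
    "https://api.example.com/api/workspaces/my-workspace/projects/<project-id>/bulk-archive-issues/",  # assumed route
    headers={"Authorization": "Bearer <token>"},
    json={"issue_ids": ["<issue-uuid-1>", "<issue-uuid-2>"]},
)
# 200 -> {"archived_at": "2024-05-21"}
# 400 -> {"error_code": 4091, "error_message": "INVALID_ARCHIVE_STATE_GROUP"}
print(resp.status_code, resp.json())
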
@@ -1,34 +1,30 @@
# Python imports
import json

# Django imports
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.core.serializers.json import DjangoJSONEncoder
from django.db.models import (
    Case,
    CharField,
    Exists,
    F,
    Func,
    Max,
    OuterRef,
    Prefetch,
    Q,
    UUIDField,
    Value,
    When,
)
from django.db.models.functions import Coalesce

# Django imports
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
from rest_framework import status

# Third Party imports
from rest_framework import status
from rest_framework.response import Response

# Module imports
from plane.app.permissions import (
    ProjectEntityPermission,
    ProjectLitePermission,
@@ -49,11 +45,21 @@ from plane.db.models import (
    IssueSubscriber,
    Project,
)
from plane.utils.grouper import (
    issue_group_values,
    issue_on_results,
    issue_queryset_grouper,
)
from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import (
    GroupedOffsetPaginator,
    SubGroupedOffsetPaginator,
)
from .. import BaseAPIView, BaseViewSet
from plane.utils.user_timezone_converter import user_timezone_converter

# Module imports
from .. import BaseAPIView, BaseViewSet

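Across this refactor, the repeated per-view ordering branches (priority, state group, max of assignee/label names) are folded into `order_issue_queryset`. Its implementation is not shown in this diff; the following is a plausible sketch reconstructed from the call sites, which pass in and receive back the queryset together with the resolved order-by parameter:

# Hypothetical reconstruction of plane/utils/order_queryset.py -- only the
# call signature is visible in this diff, so treat this as an assumption.
from django.db.models import Case, CharField, Max, Value, When

PRIORITY_ORDER = ["urgent", "high", "medium", "low", "none"]

def order_issue_queryset(issue_queryset, order_by_param="-created_at"):
    if order_by_param in ("priority", "-priority"):
        order = PRIORITY_ORDER if order_by_param == "priority" else PRIORITY_ORDER[::-1]
        issue_queryset = issue_queryset.annotate(
            priority_order=Case(
                *[When(priority=p, then=Value(i)) for i, p in enumerate(order)],
                output_field=CharField(),
            )
        ).order_by("priority_order")
        return issue_queryset, "priority_order"
    if order_by_param.lstrip("-") in ("labels__name", "assignees__first_name"):
        # Aggregate to one value per issue before ordering across a M2M join.
        issue_queryset = issue_queryset.annotate(
            max_values=Max(order_by_param.lstrip("-"))
        ).order_by("-max_values" if order_by_param.startswith("-") else "max_values")
        return issue_queryset, "max_values"
    return issue_queryset.order_by(order_by_param), order_by_param
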
class IssueListEndpoint(BaseAPIView):
@@ -105,110 +111,28 @@ class IssueListEndpoint(BaseAPIView):
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                label_ids=Coalesce(
                    ArrayAgg(
                        "labels__id",
                        distinct=True,
                        filter=~Q(labels__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                assignee_ids=Coalesce(
                    ArrayAgg(
                        "assignees__id",
                        distinct=True,
                        filter=~Q(assignees__id__isnull=True)
                        & Q(assignees__member_project__is_active=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                module_ids=Coalesce(
                    ArrayAgg(
                        "issue_module__module_id",
                        distinct=True,
                        filter=~Q(issue_module__module_id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            )
        ).distinct()

        filters = issue_filters(request.query_params, "GET")

        # Custom ordering for priority and state
        priority_order = ["urgent", "high", "medium", "low", "none"]
        state_order = [
            "backlog",
            "unstarted",
            "started",
            "completed",
            "cancelled",
        ]

        order_by_param = request.GET.get("order_by", "-created_at")

        issue_queryset = queryset.filter(**filters)
        # Issue queryset
        issue_queryset, _ = order_issue_queryset(
            issue_queryset=issue_queryset,
            order_by_param=order_by_param,
        )

        # Priority Ordering
        if order_by_param == "priority" or order_by_param == "-priority":
            priority_order = (
                priority_order
                if order_by_param == "priority"
                else priority_order[::-1]
            )
            issue_queryset = issue_queryset.annotate(
                priority_order=Case(
                    *[
                        When(priority=p, then=Value(i))
                        for i, p in enumerate(priority_order)
                    ],
                    output_field=CharField(),
                )
            ).order_by("priority_order")
        # Group by
        group_by = request.GET.get("group_by", False)
        sub_group_by = request.GET.get("sub_group_by", False)

        # State Ordering
        elif order_by_param in [
            "state__name",
            "state__group",
            "-state__name",
            "-state__group",
        ]:
            state_order = (
                state_order
                if order_by_param in ["state__name", "state__group"]
                else state_order[::-1]
            )
            issue_queryset = issue_queryset.annotate(
                state_order=Case(
                    *[
                        When(state__group=state_group, then=Value(i))
                        for i, state_group in enumerate(state_order)
                    ],
                    default=Value(len(state_order)),
                    output_field=CharField(),
                )
            ).order_by("state_order")
        # assignee and label ordering
        elif order_by_param in [
            "labels__name",
            "-labels__name",
            "assignees__first_name",
            "-assignees__first_name",
        ]:
            issue_queryset = issue_queryset.annotate(
                max_values=Max(
                    order_by_param[1::]
                    if order_by_param.startswith("-")
                    else order_by_param
                )
            ).order_by(
                "-max_values"
                if order_by_param.startswith("-")
                else "max_values"
            )
        else:
            issue_queryset = issue_queryset.order_by(order_by_param)
        # issue queryset
        issue_queryset = issue_queryset_grouper(
            queryset=issue_queryset,
            group_by=group_by,
            sub_group_by=sub_group_by,
        )

        if self.fields or self.expand:
            issues = IssueSerializer(
@@ -304,33 +228,6 @@ class IssueViewSet(BaseViewSet):
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                label_ids=Coalesce(
                    ArrayAgg(
                        "labels__id",
                        distinct=True,
                        filter=~Q(labels__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                assignee_ids=Coalesce(
                    ArrayAgg(
                        "assignees__id",
                        distinct=True,
                        filter=~Q(assignees__id__isnull=True)
                        & Q(assignees__member_project__is_active=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                module_ids=Coalesce(
                    ArrayAgg(
                        "issue_module__module_id",
                        distinct=True,
                        filter=~Q(issue_module__module_id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            )
        ).distinct()

    @method_decorator(gzip_page)
@@ -340,116 +237,104 @@ class IssueViewSet(BaseViewSet):

        issue_queryset = self.get_queryset().filter(**filters)
        # Custom ordering for priority and state
        priority_order = ["urgent", "high", "medium", "low", "none"]
        state_order = [
            "backlog",
            "unstarted",
            "started",
            "completed",
            "cancelled",
        ]

        # Priority Ordering
        if order_by_param == "priority" or order_by_param == "-priority":
            priority_order = (
                priority_order
                if order_by_param == "priority"
                else priority_order[::-1]
            )
            issue_queryset = issue_queryset.annotate(
                priority_order=Case(
                    *[
                        When(priority=p, then=Value(i))
                        for i, p in enumerate(priority_order)
                    ],
                    output_field=CharField(),
                )
            ).order_by("priority_order")
        # Issue queryset
        issue_queryset, order_by_param = order_issue_queryset(
            issue_queryset=issue_queryset,
            order_by_param=order_by_param,
        )

        # State Ordering
        elif order_by_param in [
            "state__name",
            "state__group",
            "-state__name",
            "-state__group",
        ]:
            state_order = (
                state_order
                if order_by_param in ["state__name", "state__group"]
                else state_order[::-1]
            )
            issue_queryset = issue_queryset.annotate(
                state_order=Case(
                    *[
                        When(state__group=state_group, then=Value(i))
                        for i, state_group in enumerate(state_order)
                    ],
                    default=Value(len(state_order)),
                    output_field=CharField(),
        # Group by
        group_by = request.GET.get("group_by", False)
        sub_group_by = request.GET.get("sub_group_by", False)

        # issue queryset
        issue_queryset = issue_queryset_grouper(
            queryset=issue_queryset,
            group_by=group_by,
            sub_group_by=sub_group_by,
        )

        if group_by:
            if sub_group_by:
                if group_by == sub_group_by:
                    return Response(
                        {
                            "error": "Group by and sub group by cannot have same parameters"
                        },
                        status=status.HTTP_400_BAD_REQUEST,
                    )
                else:
                    return self.paginate(
                        request=request,
                        order_by=order_by_param,
                        queryset=issue_queryset,
                        on_results=lambda issues: issue_on_results(
                            group_by=group_by,
                            issues=issues,
                            sub_group_by=sub_group_by,
                        ),
                        paginator_cls=SubGroupedOffsetPaginator,
                        group_by_fields=issue_group_values(
                            field=group_by,
                            slug=slug,
                            project_id=project_id,
                            filters=filters,
                        ),
                        sub_group_by_fields=issue_group_values(
                            field=sub_group_by,
                            slug=slug,
                            project_id=project_id,
                            filters=filters,
                        ),
                        group_by_field_name=group_by,
                        sub_group_by_field_name=sub_group_by,
                        count_filter=Q(
                            Q(issue_inbox__status=1)
                            | Q(issue_inbox__status=-1)
                            | Q(issue_inbox__status=2)
                            | Q(issue_inbox__isnull=True),
                            archived_at__isnull=True,
                            is_draft=False,
                        ),
                    )
            else:
                # Group paginate
                return self.paginate(
                    request=request,
                    order_by=order_by_param,
                    queryset=issue_queryset,
                    on_results=lambda issues: issue_on_results(
                        group_by=group_by,
                        issues=issues,
                        sub_group_by=sub_group_by,
                    ),
                    paginator_cls=GroupedOffsetPaginator,
                    group_by_fields=issue_group_values(
                        field=group_by,
                        slug=slug,
                        project_id=project_id,
                        filters=filters,
                    ),
                    group_by_field_name=group_by,
                    count_filter=Q(
                        Q(issue_inbox__status=1)
                        | Q(issue_inbox__status=-1)
                        | Q(issue_inbox__status=2)
                        | Q(issue_inbox__isnull=True),
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                )
            ).order_by("state_order")
        # assignee and label ordering
        elif order_by_param in [
            "labels__name",
            "-labels__name",
            "assignees__first_name",
            "-assignees__first_name",
        ]:
            issue_queryset = issue_queryset.annotate(
                max_values=Max(
                    order_by_param[1::]
                    if order_by_param.startswith("-")
                    else order_by_param
                )
            ).order_by(
                "-max_values"
                if order_by_param.startswith("-")
                else "max_values"
            )
        else:
            issue_queryset = issue_queryset.order_by(order_by_param)

        # Only use serializer when expand or fields else return by values
        if self.expand or self.fields:
            issues = IssueSerializer(
                issue_queryset,
                many=True,
                fields=self.fields,
                expand=self.expand,
            ).data
        else:
            issues = issue_queryset.values(
                "id",
                "name",
                "state_id",
                "sort_order",
                "completed_at",
                "estimate_point",
                "priority",
                "start_date",
                "target_date",
                "sequence_id",
                "project_id",
                "parent_id",
                "cycle_id",
                "module_ids",
                "label_ids",
                "assignee_ids",
                "sub_issues_count",
                "created_at",
                "updated_at",
                "created_by",
                "updated_by",
                "attachment_count",
                "link_count",
                "is_draft",
                "archived_at",
        return self.paginate(
            order_by=order_by_param,
            request=request,
            queryset=issue_queryset,
            on_results=lambda issues: issue_on_results(
                group_by=group_by, issues=issues, sub_group_by=sub_group_by
            ),
        )
            datetime_fields = ["created_at", "updated_at"]
            issues = user_timezone_converter(
                issues, datetime_fields, request.user.user_timezone
            )
        return Response(issues, status=status.HTTP_200_OK)

    def create(self, request, slug, project_id):
        project = Project.objects.get(pk=project_id)
@@ -481,8 +366,13 @@ class IssueViewSet(BaseViewSet):
                origin=request.META.get("HTTP_ORIGIN"),
            )
            issue = (
                self.get_queryset()
                .filter(pk=serializer.data["id"])
                issue_queryset_grouper(
                    queryset=self.get_queryset().filter(
                        pk=serializer.data["id"]
                    ),
                    group_by=None,
                    sub_group_by=None,
                )
                .values(
                    "id",
                    "name",
@@ -523,6 +413,33 @@ class IssueViewSet(BaseViewSet):
        issue = (
            self.get_queryset()
            .filter(pk=pk)
            .annotate(
                label_ids=Coalesce(
                    ArrayAgg(
                        "labels__id",
                        distinct=True,
                        filter=~Q(labels__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                assignee_ids=Coalesce(
                    ArrayAgg(
                        "assignees__id",
                        distinct=True,
                        filter=~Q(assignees__id__isnull=True)
                        & Q(assignees__member_project__is_active=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                module_ids=Coalesce(
                    ArrayAgg(
                        "issue_module__module_id",
                        distinct=True,
                        filter=~Q(issue_module__module_id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            )
            .prefetch_related(
                Prefetch(
                    "issue_reactions",
apiserver/plane/app/views/issue/bulk_operations.py (new file, 288 lines)
@@ -0,0 +1,288 @@
# Python imports
import json
from datetime import datetime

# Django imports
from django.utils import timezone

# Third Party imports
from rest_framework.response import Response
from rest_framework import status

# Module imports
from .. import BaseAPIView
from plane.app.permissions import (
    ProjectEntityPermission,
)
from plane.db.models import (
    Project,
    Issue,
    IssueLabel,
    IssueAssignee,
)
from plane.bgtasks.issue_activites_task import issue_activity


class BulkIssueOperationsEndpoint(BaseAPIView):
    permission_classes = [
        ProjectEntityPermission,
    ]

    def post(self, request, slug, project_id):
        issue_ids = request.data.get("issue_ids", [])
        if not len(issue_ids):
            return Response(
                {"error": "Issue IDs are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Get all the issues
        issues = (
            Issue.objects.filter(
                workspace__slug=slug, project_id=project_id, pk__in=issue_ids
            )
            .select_related("state")
            .prefetch_related("labels", "assignees")
        )
        # Current epoch
        epoch = int(timezone.now().timestamp())

        # Project details
        project = Project.objects.get(workspace__slug=slug, pk=project_id)
        workspace_id = project.workspace_id

        # Initialize arrays
        bulk_update_issues = []
        bulk_issue_activities = []
        bulk_update_issue_labels = []
        bulk_update_issue_assignees = []

        properties = request.data.get("properties", {})

        if properties.get("start_date", False) and properties.get("target_date", False):
            if (
                datetime.strptime(properties.get("start_date"), "%Y-%m-%d").date()
                > datetime.strptime(properties.get("target_date"), "%Y-%m-%d").date()
            ):
                return Response(
                    {
                        "error_code": 4100,
                        "error_message": "INVALID_ISSUE_DATES",
                    },
                    status=status.HTTP_400_BAD_REQUEST,
                )

        for issue in issues:

            # Priority
            if properties.get("priority", False):
                bulk_issue_activities.append(
                    {
                        "type": "issue.activity.updated",
                        "requested_data": json.dumps(
                            {"priority": properties.get("priority")}
                        ),
                        "current_instance": json.dumps(
                            {"priority": (issue.priority)}
                        ),
                        "issue_id": str(issue.id),
                        "actor_id": str(request.user.id),
                        "project_id": str(project_id),
                        "epoch": epoch,
                    }
                )
                issue.priority = properties.get("priority")

            # State
            if properties.get("state_id", False):
                bulk_issue_activities.append(
                    {
                        "type": "issue.activity.updated",
                        "requested_data": json.dumps(
                            {"state": properties.get("state")}
                        ),
                        "current_instance": json.dumps(
                            {"state": str(issue.state_id)}
                        ),
                        "issue_id": str(issue.id),
                        "actor_id": str(request.user.id),
                        "project_id": str(project_id),
                        "epoch": epoch,
                    }
                )
                issue.state_id = properties.get("state_id")

            # Start date
            if properties.get("start_date", False):
                if (
                    issue.target_date
                    and not properties.get("target_date", False)
                    and issue.target_date
                    <= datetime.strptime(
                        properties.get("start_date"), "%Y-%m-%d"
                    ).date()
                ):
                    return Response(
                        {
                            "error_code": 4101,
                            "error_message": "INVALID_ISSUE_START_DATE",
                        },
                        status=status.HTTP_400_BAD_REQUEST,
                    )
                bulk_issue_activities.append(
                    {
                        "type": "issue.activity.updated",
                        "requested_data": json.dumps(
                            {"start_date": properties.get("start_date")}
                        ),
                        "current_instance": json.dumps(
                            {"start_date": str(issue.start_date)}
                        ),
                        "issue_id": str(issue.id),
                        "actor_id": str(request.user.id),
                        "project_id": str(project_id),
                        "epoch": epoch,
                    }
                )
                issue.start_date = properties.get("start_date")

            # Target date
            if properties.get("target_date", False):
                if (
                    issue.start_date
                    and not properties.get("start_date", False)
                    and issue.start_date
                    >= datetime.strptime(
                        properties.get("target_date"), "%Y-%m-%d"
                    ).date()
                ):
                    return Response(
                        {
                            "error_code": 4102,
                            "error_message": "INVALID_ISSUE_TARGET_DATE",
                        },
                        status=status.HTTP_400_BAD_REQUEST,
                    )
                bulk_issue_activities.append(
                    {
                        "type": "issue.activity.updated",
                        "requested_data": json.dumps(
                            {"target_date": properties.get("target_date")}
                        ),
                        "current_instance": json.dumps(
                            {"target_date": str(issue.target_date)}
                        ),
                        "issue_id": str(issue.id),
                        "actor_id": str(request.user.id),
                        "project_id": str(project_id),
                        "epoch": epoch,
                    }
                )
                issue.target_date = properties.get("target_date")

            bulk_update_issues.append(issue)

            # Labels
            if properties.get("label_ids", []):
                for label_id in properties.get("label_ids", []):
                    bulk_update_issue_labels.append(
                        IssueLabel(
                            issue=issue,
                            label_id=label_id,
                            created_by=request.user,
                            project_id=project_id,
                            workspace_id=workspace_id,
                        )
                    )
                bulk_issue_activities.append(
                    {
                        "type": "issue.activity.updated",
                        "requested_data": json.dumps(
                            {"label_ids": properties.get("label_ids", [])}
                        ),
                        "current_instance": json.dumps(
                            {
                                "label_ids": [
                                    str(label.id)
                                    for label in issue.labels.all()
                                ]
                            }
                        ),
                        "issue_id": str(issue.id),
                        "actor_id": str(request.user.id),
                        "project_id": str(project_id),
                        "epoch": epoch,
                    }
                )

            # Assignees
            if properties.get("assignee_ids", []):
                for assignee_id in properties.get(
                    "assignee_ids", issue.assignees
                ):
                    bulk_update_issue_assignees.append(
                        IssueAssignee(
                            issue=issue,
                            assignee_id=assignee_id,
                            created_by=request.user,
                            project_id=project_id,
                            workspace_id=workspace_id,
                        )
                    )
                bulk_issue_activities.append(
                    {
                        "type": "issue.activity.updated",
                        "requested_data": json.dumps(
                            {
                                "assignee_ids": properties.get(
                                    "assignee_ids", []
                                )
                            }
                        ),
                        "current_instance": json.dumps(
                            {
                                "assignee_ids": [
                                    str(assignee.id)
                                    for assignee in issue.assignees.all()
                                ]
                            }
                        ),
                        "issue_id": str(issue.id),
                        "actor_id": str(request.user.id),
                        "project_id": str(project_id),
                        "epoch": epoch,
                    }
                )

        # Bulk update all the objects
        Issue.objects.bulk_update(
            bulk_update_issues,
            [
                "priority",
                "start_date",
                "target_date",
                "state",
            ],
            batch_size=100,
        )

        # Create new labels
        IssueLabel.objects.bulk_create(
            bulk_update_issue_labels,
            ignore_conflicts=True,
            batch_size=100,
        )

        # Create new assignees
        IssueAssignee.objects.bulk_create(
            bulk_update_issue_assignees,
            ignore_conflicts=True,
            batch_size=100,
        )
        # update the issue activity
        [
            issue_activity.delay(**activity)
            for activity in bulk_issue_activities
        ]

        return Response(status=status.HTTP_204_NO_CONTENT)

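A hypothetical request body for the endpoint above, showing the `properties` keys the loop inspects (priority, state_id, start_date, target_date, label_ids, assignee_ids). The field names match the code; the IDs and route are placeholders, not part of this diff:

# Illustrative payload only -- the URL route is defined elsewhere.
payload = {
    "issue_ids": ["<issue-uuid-1>", "<issue-uuid-2>"],
    "properties": {
        "priority": "high",
        "state_id": "<state-uuid>",
        "start_date": "2024-05-01",  # checked against target_date (error 4100)
        "target_date": "2024-05-31",
        "label_ids": ["<label-uuid>"],      # bulk-created as IssueLabel rows
        "assignee_ids": ["<member-uuid>"],  # bulk-created as IssueAssignee rows
    },
}

Each accepted property also queues one issue_activity task per issue, so the activity log stays consistent with the bulk write.
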
@@ -6,18 +6,14 @@ from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.core.serializers.json import DjangoJSONEncoder
from django.db.models import (
    Case,
    CharField,
    Exists,
    F,
    Func,
    Max,
    OuterRef,
    Prefetch,
    Q,
    UUIDField,
    Value,
    When,
)
from django.db.models.functions import Coalesce
from django.utils import timezone
@@ -28,6 +24,7 @@ from django.views.decorators.gzip import gzip_page
from rest_framework import status
from rest_framework.response import Response

# Module imports
from plane.app.permissions import ProjectEntityPermission
from plane.app.serializers import (
    IssueCreateSerializer,
@@ -44,10 +41,17 @@ from plane.db.models import (
    IssueSubscriber,
    Project,
)
from plane.utils.grouper import (
    issue_group_values,
    issue_on_results,
    issue_queryset_grouper,
)
from plane.utils.issue_filters import issue_filters
from plane.utils.user_timezone_converter import user_timezone_converter

# Module imports
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import (
    GroupedOffsetPaginator,
    SubGroupedOffsetPaginator,
)
from .. import BaseViewSet


@@ -88,153 +92,116 @@ class IssueDraftViewSet(BaseViewSet):
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                label_ids=Coalesce(
                    ArrayAgg(
                        "labels__id",
                        distinct=True,
                        filter=~Q(labels__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                assignee_ids=Coalesce(
                    ArrayAgg(
                        "assignees__id",
                        distinct=True,
                        filter=~Q(assignees__id__isnull=True)
                        & Q(assignees__member_project__is_active=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                module_ids=Coalesce(
                    ArrayAgg(
                        "issue_module__module_id",
                        distinct=True,
                        filter=~Q(issue_module__module_id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            )
        ).distinct()

    @method_decorator(gzip_page)
    def list(self, request, slug, project_id):
        filters = issue_filters(request.query_params, "GET")
        # Custom ordering for priority and state
        priority_order = ["urgent", "high", "medium", "low", "none"]
        state_order = [
            "backlog",
            "unstarted",
            "started",
            "completed",
            "cancelled",
        ]

        order_by_param = request.GET.get("order_by", "-created_at")

        issue_queryset = self.get_queryset().filter(**filters)
        # Issue queryset
        issue_queryset, order_by_param = order_issue_queryset(
            issue_queryset=issue_queryset,
            order_by_param=order_by_param,
        )

        # Priority Ordering
        if order_by_param == "priority" or order_by_param == "-priority":
            priority_order = (
                priority_order
                if order_by_param == "priority"
                else priority_order[::-1]
            )
            issue_queryset = issue_queryset.annotate(
                priority_order=Case(
                    *[
                        When(priority=p, then=Value(i))
                        for i, p in enumerate(priority_order)
                    ],
                    output_field=CharField(),
                )
            ).order_by("priority_order")
        # Group by
        group_by = request.GET.get("group_by", False)
        sub_group_by = request.GET.get("sub_group_by", False)

        # State Ordering
        elif order_by_param in [
            "state__name",
            "state__group",
            "-state__name",
            "-state__group",
        ]:
            state_order = (
                state_order
                if order_by_param in ["state__name", "state__group"]
                else state_order[::-1]
            )
            issue_queryset = issue_queryset.annotate(
                state_order=Case(
                    *[
                        When(state__group=state_group, then=Value(i))
                        for i, state_group in enumerate(state_order)
                    ],
                    default=Value(len(state_order)),
                    output_field=CharField(),
        # issue queryset
        issue_queryset = issue_queryset_grouper(
            queryset=issue_queryset,
            group_by=group_by,
            sub_group_by=sub_group_by,
        )

        if group_by:
            # Check group and sub group value paginate
            if sub_group_by:
                if group_by == sub_group_by:
                    return Response(
                        {
                            "error": "Group by and sub group by cannot have same parameters"
                        },
                        status=status.HTTP_400_BAD_REQUEST,
                    )
                else:
                    # group and sub group pagination
                    return self.paginate(
                        request=request,
                        order_by=order_by_param,
                        queryset=issue_queryset,
                        on_results=lambda issues: issue_on_results(
                            group_by=group_by,
                            issues=issues,
                            sub_group_by=sub_group_by,
                        ),
                        paginator_cls=SubGroupedOffsetPaginator,
                        group_by_fields=issue_group_values(
                            field=group_by,
                            slug=slug,
                            project_id=project_id,
                            filters=filters,
                        ),
                        sub_group_by_fields=issue_group_values(
                            field=sub_group_by,
                            slug=slug,
                            project_id=project_id,
                            filters=filters,
                        ),
                        group_by_field_name=group_by,
                        sub_group_by_field_name=sub_group_by,
                        count_filter=Q(
                            Q(issue_inbox__status=1)
                            | Q(issue_inbox__status=-1)
                            | Q(issue_inbox__status=2)
                            | Q(issue_inbox__isnull=True),
                            archived_at__isnull=True,
                            is_draft=False,
                        ),
                    )
            # Group Paginate
            else:
                # Group paginate
                return self.paginate(
                    request=request,
                    order_by=order_by_param,
                    queryset=issue_queryset,
                    on_results=lambda issues: issue_on_results(
                        group_by=group_by,
                        issues=issues,
                        sub_group_by=sub_group_by,
                    ),
                    paginator_cls=GroupedOffsetPaginator,
                    group_by_fields=issue_group_values(
                        field=group_by,
                        slug=slug,
                        project_id=project_id,
                        filters=filters,
                    ),
                    group_by_field_name=group_by,
                    count_filter=Q(
                        Q(issue_inbox__status=1)
                        | Q(issue_inbox__status=-1)
                        | Q(issue_inbox__status=2)
                        | Q(issue_inbox__isnull=True),
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                )
            ).order_by("state_order")
        # assignee and label ordering
        elif order_by_param in [
            "labels__name",
            "-labels__name",
            "assignees__first_name",
            "-assignees__first_name",
        ]:
            issue_queryset = issue_queryset.annotate(
                max_values=Max(
                    order_by_param[1::]
                    if order_by_param.startswith("-")
                    else order_by_param
                )
            ).order_by(
                "-max_values"
                if order_by_param.startswith("-")
                else "max_values"
            )
        else:
            issue_queryset = issue_queryset.order_by(order_by_param)

        # Only use serializer when expand else return by values
        if self.expand or self.fields:
            issues = IssueSerializer(
                issue_queryset,
                many=True,
                fields=self.fields,
                expand=self.expand,
            ).data
        else:
            issues = issue_queryset.values(
                "id",
                "name",
                "state_id",
                "sort_order",
                "completed_at",
                "estimate_point",
                "priority",
                "start_date",
                "target_date",
                "sequence_id",
                "project_id",
                "parent_id",
                "cycle_id",
                "module_ids",
                "label_ids",
                "assignee_ids",
                "sub_issues_count",
                "created_at",
                "updated_at",
                "created_by",
                "updated_by",
                "attachment_count",
                "link_count",
                "is_draft",
                "archived_at",
        # List Paginate
        return self.paginate(
            order_by=order_by_param,
            request=request,
            queryset=issue_queryset,
            on_results=lambda issues: issue_on_results(
                group_by=group_by, issues=issues, sub_group_by=sub_group_by
            ),
        )
            datetime_fields = ["created_at", "updated_at"]
            issues = user_timezone_converter(
                issues, datetime_fields, request.user.user_timezone
            )
        return Response(issues, status=status.HTTP_200_OK)

    def create(self, request, slug, project_id):
        project = Project.objects.get(pk=project_id)
@@ -265,12 +232,45 @@ class IssueDraftViewSet(BaseViewSet):
                notification=True,
                origin=request.META.get("HTTP_ORIGIN"),
            )

            issue = (
                self.get_queryset().filter(pk=serializer.data["id"]).first()
            )
            return Response(
                IssueSerializer(issue).data, status=status.HTTP_201_CREATED
                issue_queryset_grouper(
                    queryset=self.get_queryset().filter(
                        pk=serializer.data["id"]
                    ),
                    group_by=None,
                    sub_group_by=None,
                )
                .values(
                    "id",
                    "name",
                    "state_id",
                    "sort_order",
                    "completed_at",
                    "estimate_point",
                    "priority",
                    "start_date",
                    "target_date",
                    "sequence_id",
                    "project_id",
                    "parent_id",
                    "cycle_id",
                    "module_ids",
                    "label_ids",
                    "assignee_ids",
                    "sub_issues_count",
                    "created_at",
                    "updated_at",
                    "created_by",
                    "updated_by",
                    "attachment_count",
                    "link_count",
                    "is_draft",
                    "archived_at",
                )
                .first()
            )
            return Response(issue, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def partial_update(self, request, slug, project_id, pk):
@@ -309,6 +309,33 @@ class IssueDraftViewSet(BaseViewSet):
        issue = (
            self.get_queryset()
            .filter(pk=pk)
            .annotate(
                label_ids=Coalesce(
                    ArrayAgg(
                        "labels__id",
                        distinct=True,
                        filter=~Q(labels__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                assignee_ids=Coalesce(
                    ArrayAgg(
                        "assignees__id",
                        distinct=True,
                        filter=~Q(assignees__id__isnull=True)
                        & Q(assignees__member_project__is_active=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                module_ids=Coalesce(
                    ArrayAgg(
                        "issue_module__module_id",
                        distinct=True,
                        filter=~Q(issue_module__module_id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            )
            .prefetch_related(
                Prefetch(
                    "issue_reactions",
@@ -165,6 +165,7 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
        )

    def get(self, request, slug, project_id, pk=None):
        plot_type = request.GET.get("plot_type", "issues")
        if pk is None:
            queryset = self.get_queryset()
            modules = queryset.values(  # Required fields
@@ -323,6 +324,7 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
            queryset=modules,
            slug=slug,
            project_id=project_id,
            plot_type=plot_type,
            module_id=pk,
        )

@@ -16,8 +16,9 @@ from django.db.models import (
    Subquery,
    UUIDField,
    Value,
    Sum,
)
from django.db.models.functions import Coalesce
from django.db.models.functions import Coalesce, Cast
from django.core.serializers.json import DjangoJSONEncoder
from django.utils import timezone

@@ -128,6 +129,34 @@ class ModuleViewSet(BaseViewSet):
            .annotate(cnt=Count("pk"))
            .values("cnt")
        )
        completed_estimate_point = (
            Issue.issue_objects.filter(
                estimate_point__estimate__type="points",
                state__group="completed",
                issue_module__module_id=OuterRef("pk"),
            )
            .values("issue_module__module_id")
            .annotate(
                completed_estimate_points=Sum(
                    Cast("estimate_point__value", IntegerField())
                )
            )
            .values("completed_estimate_points")[:1]
        )

        total_estimate_point = (
            Issue.issue_objects.filter(
                estimate_point__estimate__type="points",
                issue_module__module_id=OuterRef("pk"),
            )
            .values("issue_module__module_id")
            .annotate(
                total_estimate_points=Sum(
                    Cast("estimate_point__value", IntegerField())
                )
            )
            .values("total_estimate_points")[:1]
        )
|
||||
super()
|
||||
.get_queryset()
|
||||
@@ -182,6 +211,18 @@ class ModuleViewSet(BaseViewSet):
|
||||
Value(0, output_field=IntegerField()),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
completed_estimate_points=Coalesce(
|
||||
Subquery(completed_estimate_point),
|
||||
Value(0, output_field=IntegerField()),
|
||||
),
|
||||
)
|
||||
.annotate(
|
||||
total_estimate_points=Coalesce(
|
||||
Subquery(total_estimate_point),
|
||||
Value(0, output_field=IntegerField()),
|
||||
),
|
||||
)
|
||||
.annotate(
|
||||
member_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
@@ -225,6 +266,7 @@ class ModuleViewSet(BaseViewSet):
|
||||
"sort_order",
|
||||
"external_source",
|
||||
"external_id",
|
||||
"logo_props",
|
||||
# computed fields
|
||||
"is_favorite",
|
||||
"cancelled_issues",
|
||||
@@ -232,6 +274,8 @@ class ModuleViewSet(BaseViewSet):
|
||||
"total_issues",
|
||||
"started_issues",
|
||||
"unstarted_issues",
|
||||
"completed_estimate_points",
|
||||
"total_estimate_points",
|
||||
"backlog_issues",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
@@ -281,7 +325,10 @@ class ModuleViewSet(BaseViewSet):
|
||||
"sort_order",
|
||||
"external_source",
|
||||
"external_id",
|
||||
"logo_props",
|
||||
# computed fields
|
||||
"completed_estimate_points",
|
||||
"total_estimate_points",
|
||||
"total_issues",
|
||||
"is_favorite",
|
||||
"cancelled_issues",
|
||||
@@ -299,6 +346,7 @@ class ModuleViewSet(BaseViewSet):
|
||||
return Response(modules, status=status.HTTP_200_OK)
|
||||
|
||||
def retrieve(self, request, slug, project_id, pk):
|
||||
plot_type = request.GET.get("plot_type", "burndown")
|
||||
queryset = (
|
||||
self.get_queryset()
|
||||
.filter(archived_at__isnull=True)
|
||||
@@ -421,6 +469,7 @@ class ModuleViewSet(BaseViewSet):
|
||||
queryset=modules,
|
||||
slug=slug,
|
||||
project_id=project_id,
|
||||
plot_type=plot_type,
|
||||
module_id=pk,
|
||||
)
|
||||
|
||||
@@ -465,7 +514,10 @@ class ModuleViewSet(BaseViewSet):
|
||||
"sort_order",
|
||||
"external_source",
|
||||
"external_id",
|
||||
"logo_props",
|
||||
# computed fields
|
||||
"completed_estimate_points",
|
||||
"total_estimate_points",
|
||||
"is_favorite",
|
||||
"cancelled_issues",
|
||||
"completed_issues",
|
||||
|
||||
@@ -1,37 +1,50 @@
# Python imports
import json

from django.db.models import (
    F,
    Func,
    OuterRef,
    Q,
)

# Django Imports
from django.utils import timezone
from django.db.models import F, OuterRef, Func, Q
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models import Value, UUIDField
from django.db.models.functions import Coalesce

# Third party imports
from rest_framework.response import Response
from rest_framework import status
from rest_framework.response import Response

from plane.app.permissions import (
    ProjectEntityPermission,
)
from plane.app.serializers import (
    ModuleIssueSerializer,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.db.models import (
    Issue,
    IssueAttachment,
    IssueLink,
    ModuleIssue,
    Project,
)
from plane.utils.grouper import (
    issue_group_values,
    issue_on_results,
    issue_queryset_grouper,
)
from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import (
    GroupedOffsetPaginator,
    SubGroupedOffsetPaginator,
)

# Module imports
from .. import BaseViewSet
from plane.app.serializers import (
    ModuleIssueSerializer,
    IssueSerializer,
)
from plane.app.permissions import ProjectEntityPermission
from plane.db.models import (
    ModuleIssue,
    Project,
    Issue,
    IssueLink,
    IssueAttachment,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.utils.issue_filters import issue_filters
from plane.utils.user_timezone_converter import user_timezone_converter

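Every refactored list endpoint in this diff ends with the same three-way pagination decision. A compact sketch of that control flow in plain Python (the names mirror the view code; the returned strings stand in for the paginator classes passed to BaseViewSet.paginate):

# Decision table used by the refactored list() methods, shown as plain Python.
def pick_pagination(group_by, sub_group_by):
    if group_by and sub_group_by:
        if group_by == sub_group_by:
            return "400: group_by and sub_group_by cannot be the same"
        return "SubGroupedOffsetPaginator"  # grid: group x sub-group buckets
    if group_by:
        return "GroupedOffsetPaginator"     # one bucket per group value
    return "offset list pagination"         # flat, ordered list

for case in [(None, None), ("state", None), ("state", "priority"), ("state", "state")]:
    print(case, "->", pick_pagination(*case))
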
class ModuleIssueViewSet(BaseViewSet):
|
||||
serializer_class = ModuleIssueSerializer
|
||||
@@ -80,82 +93,115 @@ class ModuleIssueViewSet(BaseViewSet):
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
label_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"labels__id",
|
||||
distinct=True,
|
||||
filter=~Q(labels__id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
assignee_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"assignees__id",
|
||||
distinct=True,
|
||||
filter=~Q(assignees__id__isnull=True)
|
||||
& Q(assignees__member_project__is_active=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
module_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"issue_module__module_id",
|
||||
distinct=True,
|
||||
                        filter=~Q(issue_module__module_id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            )
        ).distinct()

    @method_decorator(gzip_page)
    def list(self, request, slug, project_id, module_id):
        fields = [
            field
            for field in request.GET.get("fields", "").split(",")
            if field
        ]
        filters = issue_filters(request.query_params, "GET")
        issue_queryset = self.get_queryset().filter(**filters)
        if self.fields or self.expand:
            issues = IssueSerializer(
                issue_queryset, many=True, fields=fields if fields else None
            ).data
        else:
            issues = issue_queryset.values(
                "id",
                "name",
                "state_id",
                "sort_order",
                "completed_at",
                "estimate_point",
                "priority",
                "start_date",
                "target_date",
                "sequence_id",
                "project_id",
                "parent_id",
                "cycle_id",
                "module_ids",
                "label_ids",
                "assignee_ids",
                "sub_issues_count",
                "created_at",
                "updated_at",
                "created_by",
                "updated_by",
                "attachment_count",
                "link_count",
                "is_draft",
                "archived_at",
            )
            datetime_fields = ["created_at", "updated_at"]
            issues = user_timezone_converter(
                issues, datetime_fields, request.user.user_timezone
            )
        order_by_param = request.GET.get("order_by", "created_at")

        return Response(issues, status=status.HTTP_200_OK)
        # Issue queryset
        issue_queryset, order_by_param = order_issue_queryset(
            issue_queryset=issue_queryset,
            order_by_param=order_by_param,
        )

        # Group by
        group_by = request.GET.get("group_by", False)
        sub_group_by = request.GET.get("sub_group_by", False)

        # issue queryset
        issue_queryset = issue_queryset_grouper(
            queryset=issue_queryset,
            group_by=group_by,
            sub_group_by=sub_group_by,
        )

        if group_by:
            # Check group and sub group value paginate
            if sub_group_by:
                if group_by == sub_group_by:
                    return Response(
                        {
                            "error": "Group by and sub group by cannot have same parameters"
                        },
                        status=status.HTTP_400_BAD_REQUEST,
                    )
                else:
                    # group and sub group pagination
                    return self.paginate(
                        request=request,
                        order_by=order_by_param,
                        queryset=issue_queryset,
                        on_results=lambda issues: issue_on_results(
                            group_by=group_by,
                            issues=issues,
                            sub_group_by=sub_group_by,
                        ),
                        paginator_cls=SubGroupedOffsetPaginator,
                        group_by_fields=issue_group_values(
                            field=group_by,
                            slug=slug,
                            project_id=project_id,
                            filters=filters,
                        ),
                        sub_group_by_fields=issue_group_values(
                            field=sub_group_by,
                            slug=slug,
                            project_id=project_id,
                            filters=filters,
                        ),
                        group_by_field_name=group_by,
                        sub_group_by_field_name=sub_group_by,
                        count_filter=Q(
                            Q(issue_inbox__status=1)
                            | Q(issue_inbox__status=-1)
                            | Q(issue_inbox__status=2)
                            | Q(issue_inbox__isnull=True),
                            archived_at__isnull=True,
                            is_draft=False,
                        ),
                    )
            # Group Paginate
            else:
                # Group paginate
                return self.paginate(
                    request=request,
                    order_by=order_by_param,
                    queryset=issue_queryset,
                    on_results=lambda issues: issue_on_results(
                        group_by=group_by,
                        issues=issues,
                        sub_group_by=sub_group_by,
                    ),
                    paginator_cls=GroupedOffsetPaginator,
                    group_by_fields=issue_group_values(
                        field=group_by,
                        slug=slug,
                        project_id=project_id,
                        filters=filters,
                    ),
                    group_by_field_name=group_by,
                    count_filter=Q(
                        Q(issue_inbox__status=1)
                        | Q(issue_inbox__status=-1)
                        | Q(issue_inbox__status=2)
                        | Q(issue_inbox__isnull=True),
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                )
        else:
            # List Paginate
            return self.paginate(
                order_by=order_by_param,
                request=request,
                queryset=issue_queryset,
                on_results=lambda issues: issue_on_results(
                    group_by=group_by, issues=issues, sub_group_by=sub_group_by
                ),
            )

    # create multiple issues inside a module
    def create_module_issues(self, request, slug, project_id, module_id):
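For readers following this refactor: the list view above no longer serializes whole querysets itself; issue_queryset_grouper plus the grouped offset paginators do the bucketing. A minimal, self-contained sketch of the grouping idea in plain Python (illustrative only, not the actual paginator code):

from collections import defaultdict

def group_issues(issues, group_by):
    # Bucket issue dicts by one field, the way the grouped paginators
    # expect results to be organized (simplified, in-memory).
    groups = defaultdict(list)
    for issue in issues:
        groups[issue.get(group_by)].append(issue)
    return dict(groups)

issues = [
    {"id": 1, "priority": "high"},
    {"id": 2, "priority": "low"},
    {"id": 3, "priority": "high"},
]
print(group_issues(issues, "priority"))  # {'high': [...], 'low': [...]}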
@@ -1,26 +1,27 @@
# Django imports
from django.db.models import Q, OuterRef, Exists
from django.db.models import Exists, OuterRef, Q
from django.utils import timezone

# Third party imports
from rest_framework import status
from rest_framework.response import Response
from plane.utils.paginator import BasePaginator

# Module imports
from ..base import BaseViewSet, BaseAPIView
from plane.db.models import (
    Notification,
    IssueAssignee,
    IssueSubscriber,
    Issue,
    WorkspaceMember,
    UserNotificationPreference,
)
from plane.app.serializers import (
    NotificationSerializer,
    UserNotificationPreferenceSerializer,
)
from plane.db.models import (
    Issue,
    IssueAssignee,
    IssueSubscriber,
    Notification,
    UserNotificationPreference,
    WorkspaceMember,
)
from plane.utils.paginator import BasePaginator

# Module imports
from ..base import BaseAPIView, BaseViewSet


class NotificationViewSet(BaseViewSet, BasePaginator):
@@ -131,6 +132,7 @@ class NotificationViewSet(BaseViewSet, BasePaginator):
            "cursor", False
        ):
            return self.paginate(
                order_by=request.GET.get("order_by", "-created_at"),
                request=request,
                queryset=(notifications),
                on_results=lambda notifications: NotificationSerializer(
@@ -1,26 +1,25 @@
# Python imports
import boto3
from django.conf import settings
from django.utils import timezone
import json

# Django imports
from django.db import IntegrityError
from django.db.models import (
    Prefetch,
    Q,
    Exists,
    OuterRef,
    F,
    Func,
    OuterRef,
    Prefetch,
    Q,
    Subquery,
)
from django.conf import settings
from django.utils import timezone
from django.core.serializers.json import DjangoJSONEncoder

# Third Party imports
from rest_framework.response import Response
from rest_framework import status
from rest_framework import serializers
from rest_framework import serializers, status
from rest_framework.permissions import AllowAny

# Module imports
@@ -28,27 +27,26 @@ from plane.app.views.base import BaseViewSet, BaseAPIView
from plane.app.serializers import (
    ProjectSerializer,
    ProjectListSerializer,
    ProjectDeployBoardSerializer,
    DeployBoardSerializer,
)

from plane.app.permissions import (
    ProjectBasePermission,
    ProjectMemberPermission,
)

from plane.db.models import (
    Project,
    ProjectMember,
    Workspace,
    State,
    UserFavorite,
    ProjectIdentifier,
    Module,
    Cycle,
    Inbox,
    ProjectDeployBoard,
    DeployBoard,
    IssueProperty,
    Issue,
    Module,
    Project,
    ProjectIdentifier,
    ProjectMember,
    State,
    Workspace,
)
from plane.utils.cache import cache_response
from plane.bgtasks.webhook_task import model_activity
@@ -137,12 +135,11 @@ class ProjectViewSet(BaseViewSet):
                ).values("role")
            )
            .annotate(
                is_deployed=Exists(
                    ProjectDeployBoard.objects.filter(
                        project_id=OuterRef("pk"),
                        workspace__slug=self.kwargs.get("slug"),
                    )
                )
                anchor=DeployBoard.objects.filter(
                    entity_name="project",
                    entity_identifier=OuterRef("pk"),
                    workspace__slug=self.kwargs.get("slug"),
                ).values("anchor")
            )
            .annotate(sort_order=Subquery(sort_order))
            .prefetch_related(
@@ -169,6 +166,7 @@ class ProjectViewSet(BaseViewSet):
            "cursor", False
        ):
            return self.paginate(
                order_by=request.GET.get("order_by", "-created_at"),
                request=request,
                queryset=(projects),
                on_results=lambda projects: ProjectListSerializer(
@@ -639,29 +637,28 @@ class ProjectPublicCoverImagesEndpoint(BaseAPIView):
        return Response(files, status=status.HTTP_200_OK)


class ProjectDeployBoardViewSet(BaseViewSet):
class DeployBoardViewSet(BaseViewSet):
    permission_classes = [
        ProjectMemberPermission,
    ]
    serializer_class = ProjectDeployBoardSerializer
    model = ProjectDeployBoard
    serializer_class = DeployBoardSerializer
    model = DeployBoard

    def get_queryset(self):
        return (
            super()
            .get_queryset()
            .filter(
                workspace__slug=self.kwargs.get("slug"),
                project_id=self.kwargs.get("project_id"),
            )
            .select_related("project")
        )
    def list(self, request, slug, project_id):
        project_deploy_board = DeployBoard.objects.filter(
            entity_name="project",
            entity_identifier=project_id,
            workspace__slug=slug,
        ).first()

        serializer = DeployBoardSerializer(project_deploy_board)
        return Response(serializer.data, status=status.HTTP_200_OK)

    def create(self, request, slug, project_id):
        comments = request.data.get("comments", False)
        reactions = request.data.get("reactions", False)
        comments = request.data.get("is_comments_enabled", False)
        reactions = request.data.get("is_reactions_enabled", False)
        inbox = request.data.get("inbox", None)
        votes = request.data.get("votes", False)
        votes = request.data.get("is_votes_enabled", False)
        views = request.data.get(
            "views",
            {
@@ -673,17 +670,18 @@ class ProjectDeployBoardViewSet(BaseViewSet):
            },
        )

        project_deploy_board, _ = ProjectDeployBoard.objects.get_or_create(
            anchor=f"{slug}/{project_id}",
        project_deploy_board, _ = DeployBoard.objects.get_or_create(
            entity_name="project",
            entity_identifier=project_id,
            project_id=project_id,
        )
        project_deploy_board.comments = comments
        project_deploy_board.reactions = reactions
        project_deploy_board.inbox = inbox
        project_deploy_board.votes = votes
        project_deploy_board.views = views
        project_deploy_board.view_props = views
        project_deploy_board.is_votes_enabled = votes
        project_deploy_board.is_comments_enabled = comments
        project_deploy_board.is_reactions_enabled = reactions

        project_deploy_board.save()

        serializer = ProjectDeployBoardSerializer(project_deploy_board)
        serializer = DeployBoardSerializer(project_deploy_board)
        return Response(serializer.data, status=status.HTTP_200_OK)
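The create payload keys were renamed to match the new DeployBoard fields (comments -> is_comments_enabled, and so on). A hedged client-side sketch; the route prefix and credentials below are placeholders for illustration, not confirmed by this diff:

import requests

payload = {
    "is_comments_enabled": True,
    "is_reactions_enabled": True,
    "is_votes_enabled": False,
    "inbox": None,
}
# Hypothetical local instance, workspace slug, project id, and token.
resp = requests.post(
    "http://localhost:8000/api/workspaces/my-workspace/projects/<project_id>/project-deploy-boards/",
    json=payload,
    headers={"Authorization": "Bearer <token>"},
)
print(resp.status_code)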
@@ -250,6 +250,7 @@ class UserActivityEndpoint(BaseAPIView, BasePaginator):
        ).select_related("actor", "workspace", "issue", "project")

        return self.paginate(
            order_by=request.GET.get("order_by", "-created_at"),
            request=request,
            queryset=queryset,
            on_results=lambda issue_activities: IssueActivitySerializer(
@@ -1,47 +1,56 @@
# Django imports
from django.db.models import (
    Q,
    OuterRef,
    Func,
    F,
    Case,
    Value,
    CharField,
    When,
    Exists,
    Max,
)
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models import UUIDField
from django.db.models import (
    Exists,
    F,
    Func,
    OuterRef,
    Q,
    UUIDField,
    Value,
)
from django.db.models.functions import Coalesce
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
from rest_framework import status

# Third party imports
from rest_framework.response import Response
from rest_framework import status

from plane.app.permissions import (
    ProjectEntityPermission,
    WorkspaceEntityPermission,
)
from plane.app.serializers import (
    IssueViewSerializer,
)
from plane.db.models import (
    Issue,
    IssueAttachment,
    IssueLink,
    IssueView,
    Workspace,
)
from plane.utils.grouper import (
    issue_group_values,
    issue_on_results,
    issue_queryset_grouper,
)
from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import (
    GroupedOffsetPaginator,
    SubGroupedOffsetPaginator,
)

# Module imports
from .. import BaseViewSet
from plane.app.serializers import (
    IssueViewSerializer,
    IssueSerializer,
)
from plane.app.permissions import (
    WorkspaceEntityPermission,
    ProjectEntityPermission,
)

from plane.db.models import (
    Workspace,
    IssueView,
    Issue,
    UserFavorite,
    IssueLink,
    IssueAttachment,
)
from plane.utils.issue_filters import issue_filters
from plane.utils.user_timezone_converter import user_timezone_converter


class GlobalViewViewSet(BaseViewSet):
    serializer_class = IssueViewSerializer
@@ -143,17 +152,6 @@ class GlobalViewIssuesViewSet(BaseViewSet):
    @method_decorator(gzip_page)
    def list(self, request, slug):
        filters = issue_filters(request.query_params, "GET")

        # Custom ordering for priority and state
        priority_order = ["urgent", "high", "medium", "low", "none"]
        state_order = [
            "backlog",
            "unstarted",
            "started",
            "completed",
            "cancelled",
        ]

        order_by_param = request.GET.get("order_by", "-created_at")

        issue_queryset = (
@@ -162,103 +160,107 @@ class GlobalViewIssuesViewSet(BaseViewSet):
            .annotate(cycle_id=F("issue_cycle__cycle_id"))
        )

        # Priority Ordering
        if order_by_param == "priority" or order_by_param == "-priority":
            priority_order = (
                priority_order
                if order_by_param == "priority"
                else priority_order[::-1]
            )
            issue_queryset = issue_queryset.annotate(
                priority_order=Case(
                    *[
                        When(priority=p, then=Value(i))
                        for i, p in enumerate(priority_order)
                    ],
                    output_field=CharField(),
                )
            ).order_by("priority_order")
        # Issue queryset
        issue_queryset, order_by_param = order_issue_queryset(
            issue_queryset=issue_queryset,
            order_by_param=order_by_param,
        )

        # State Ordering
        elif order_by_param in [
            "state__name",
            "state__group",
            "-state__name",
            "-state__group",
        ]:
            state_order = (
                state_order
                if order_by_param in ["state__name", "state__group"]
                else state_order[::-1]
            )
            issue_queryset = issue_queryset.annotate(
                state_order=Case(
                    *[
                        When(state__group=state_group, then=Value(i))
                        for i, state_group in enumerate(state_order)
                    ],
                    default=Value(len(state_order)),
                    output_field=CharField(),
                )
            ).order_by("state_order")
        # assignee and label ordering
        elif order_by_param in [
            "labels__name",
            "-labels__name",
            "assignees__first_name",
            "-assignees__first_name",
        ]:
            issue_queryset = issue_queryset.annotate(
                max_values=Max(
                    order_by_param[1::]
                    if order_by_param.startswith("-")
                    else order_by_param
                )
            ).order_by(
                "-max_values"
                if order_by_param.startswith("-")
                else "max_values"
            )
        else:
            issue_queryset = issue_queryset.order_by(order_by_param)
        # Group by
        group_by = request.GET.get("group_by", False)
        sub_group_by = request.GET.get("sub_group_by", False)

        if self.fields:
            issues = IssueSerializer(
                issue_queryset, many=True, fields=self.fields
            ).data
        # issue queryset
        issue_queryset = issue_queryset_grouper(
            queryset=issue_queryset,
            group_by=group_by,
            sub_group_by=sub_group_by,
        )

        if group_by:
            # Check group and sub group value paginate
            if sub_group_by:
                if group_by == sub_group_by:
                    return Response(
                        {
                            "error": "Group by and sub group by cannot have same parameters"
                        },
                        status=status.HTTP_400_BAD_REQUEST,
                    )
                else:
                    # group and sub group pagination
                    return self.paginate(
                        request=request,
                        order_by=order_by_param,
                        queryset=issue_queryset,
                        on_results=lambda issues: issue_on_results(
                            group_by=group_by,
                            issues=issues,
                            sub_group_by=sub_group_by,
                        ),
                        paginator_cls=SubGroupedOffsetPaginator,
                        group_by_fields=issue_group_values(
                            field=group_by,
                            slug=slug,
                            project_id=None,
                            filters=filters,
                        ),
                        sub_group_by_fields=issue_group_values(
                            field=sub_group_by,
                            slug=slug,
                            project_id=None,
                            filters=filters,
                        ),
                        group_by_field_name=group_by,
                        sub_group_by_field_name=sub_group_by,
                        count_filter=Q(
                            Q(issue_inbox__status=1)
                            | Q(issue_inbox__status=-1)
                            | Q(issue_inbox__status=2)
                            | Q(issue_inbox__isnull=True),
                            archived_at__isnull=True,
                            is_draft=False,
                        ),
                    )
            # Group Paginate
            else:
                # Group paginate
                return self.paginate(
                    request=request,
                    order_by=order_by_param,
                    queryset=issue_queryset,
                    on_results=lambda issues: issue_on_results(
                        group_by=group_by,
                        issues=issues,
                        sub_group_by=sub_group_by,
                    ),
                    paginator_cls=GroupedOffsetPaginator,
                    group_by_fields=issue_group_values(
                        field=group_by,
                        slug=slug,
                        project_id=None,
                        filters=filters,
                    ),
                    group_by_field_name=group_by,
                    count_filter=Q(
                        Q(issue_inbox__status=1)
                        | Q(issue_inbox__status=-1)
                        | Q(issue_inbox__status=2)
                        | Q(issue_inbox__isnull=True),
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                )
        else:
            issues = issue_queryset.values(
                "id",
                "name",
                "state_id",
                "sort_order",
                "completed_at",
                "estimate_point",
                "priority",
                "start_date",
                "target_date",
                "sequence_id",
                "project_id",
                "parent_id",
                "cycle_id",
                "module_ids",
                "label_ids",
                "assignee_ids",
                "sub_issues_count",
                "created_at",
                "updated_at",
                "created_by",
                "updated_by",
                "attachment_count",
                "link_count",
                "is_draft",
                "archived_at",
            # List Paginate
            return self.paginate(
                order_by=order_by_param,
                request=request,
                queryset=issue_queryset,
                on_results=lambda issues: issue_on_results(
                    group_by=group_by, issues=issues, sub_group_by=sub_group_by
                ),
            )
            datetime_fields = ["created_at", "updated_at"]
            issues = user_timezone_converter(
                issues, datetime_fields, request.user.user_timezone
            )
        return Response(issues, status=status.HTTP_200_OK)


class IssueViewViewSet(BaseViewSet):
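The hand-rolled priority/state Case(...When(...)) ordering removed above is now delegated to the shared order_issue_queryset helper. An in-memory sketch of the priority ordering it subsumes (plain Python, illustrative only):

PRIORITY_ORDER = ["urgent", "high", "medium", "low", "none"]

def order_by_priority(issues, descending=False):
    # Rank each priority by its index, the same idea the Case/When
    # annotation expressed in SQL.
    order = PRIORITY_ORDER[::-1] if descending else PRIORITY_ORDER
    rank = {p: i for i, p in enumerate(order)}
    return sorted(issues, key=lambda issue: rank[issue["priority"]])

issues = [{"id": 1, "priority": "low"}, {"id": 2, "priority": "urgent"}]
print(order_by_priority(issues))  # urgent first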
@@ -1,61 +1,66 @@
# Python imports
from datetime import date

from dateutil.relativedelta import relativedelta

# Django imports
from django.utils import timezone
from django.db.models import (
    OuterRef,
    Func,
    F,
    Q,
    Count,
    Case,
    Value,
    CharField,
    When,
    Max,
    Count,
    F,
    Func,
    IntegerField,
    UUIDField,
    OuterRef,
    Q,
    Value,
    When,
)
from django.db.models.functions import ExtractWeek, Cast
from django.db.models.fields import DateField
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models.functions import Coalesce
from django.db.models.functions import Cast, ExtractWeek
from django.utils import timezone

# Third party modules
from rest_framework import status
from rest_framework.response import Response

# Module imports
from plane.app.serializers import (
    WorkSpaceSerializer,
    ProjectMemberSerializer,
    IssueActivitySerializer,
    IssueSerializer,
    WorkspaceUserPropertiesSerializer,
)
from plane.app.views.base import BaseAPIView
from plane.db.models import (
    User,
    Workspace,
    ProjectMember,
    IssueActivity,
    Issue,
    IssueLink,
    IssueAttachment,
    IssueSubscriber,
    Project,
    WorkspaceMember,
    CycleIssue,
    WorkspaceUserProperties,
)
from plane.app.permissions import (
    WorkspaceEntityPermission,
    WorkspaceViewerPermission,
)

# Module imports
from plane.app.serializers import (
    IssueActivitySerializer,
    ProjectMemberSerializer,
    WorkSpaceSerializer,
    WorkspaceUserPropertiesSerializer,
)
from plane.app.views.base import BaseAPIView
from plane.db.models import (
    CycleIssue,
    Issue,
    IssueActivity,
    IssueAttachment,
    IssueLink,
    IssueSubscriber,
    Project,
    ProjectMember,
    User,
    Workspace,
    WorkspaceMember,
    WorkspaceUserProperties,
)
from plane.utils.grouper import (
    issue_group_values,
    issue_on_results,
    issue_queryset_grouper,
)
from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import (
    GroupedOffsetPaginator,
    SubGroupedOffsetPaginator,
)


class UserLastProjectWithWorkspaceEndpoint(BaseAPIView):
@@ -99,22 +104,8 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
    ]

    def get(self, request, slug, user_id):
        fields = [
            field
            for field in request.GET.get("fields", "").split(",")
            if field
        ]
        filters = issue_filters(request.query_params, "GET")

        # Custom ordering for priority and state
        priority_order = ["urgent", "high", "medium", "low", "none"]
        state_order = [
            "backlog",
            "unstarted",
            "started",
            "completed",
            "cancelled",
        ]
        filters = issue_filters(request.query_params, "GET")

        order_by_param = request.GET.get("order_by", "-created_at")
        issue_queryset = (
@@ -152,100 +143,103 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                label_ids=Coalesce(
                    ArrayAgg(
                        "labels__id",
                        distinct=True,
                        filter=~Q(labels__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                assignee_ids=Coalesce(
                    ArrayAgg(
                        "assignees__id",
                        distinct=True,
                        filter=~Q(assignees__id__isnull=True)
                        & Q(assignees__member_project__is_active=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                module_ids=Coalesce(
                    ArrayAgg(
                        "issue_module__module_id",
                        distinct=True,
                        filter=~Q(issue_module__module_id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            )
            .order_by("created_at")
        ).distinct()

        # Priority Ordering
        if order_by_param == "priority" or order_by_param == "-priority":
            priority_order = (
                priority_order
                if order_by_param == "priority"
                else priority_order[::-1]
            )
            issue_queryset = issue_queryset.annotate(
                priority_order=Case(
                    *[
                        When(priority=p, then=Value(i))
                        for i, p in enumerate(priority_order)
                    ],
                    output_field=CharField(),
                )
            ).order_by("priority_order")
        # Issue queryset
        issue_queryset, order_by_param = order_issue_queryset(
            issue_queryset=issue_queryset,
            order_by_param=order_by_param,
        )

        # State Ordering
        elif order_by_param in [
            "state__name",
            "state__group",
            "-state__name",
            "-state__group",
        ]:
            state_order = (
                state_order
                if order_by_param in ["state__name", "state__group"]
                else state_order[::-1]
            )
            issue_queryset = issue_queryset.annotate(
                state_order=Case(
                    *[
                        When(state__group=state_group, then=Value(i))
                        for i, state_group in enumerate(state_order)
                    ],
                    default=Value(len(state_order)),
                    output_field=CharField(),
        # Group by
        group_by = request.GET.get("group_by", False)
        sub_group_by = request.GET.get("sub_group_by", False)

        # issue queryset
        issue_queryset = issue_queryset_grouper(
            queryset=issue_queryset,
            group_by=group_by,
            sub_group_by=sub_group_by,
        )

        if group_by:
            if sub_group_by:
                if group_by == sub_group_by:
                    return Response(
                        {
                            "error": "Group by and sub group by cannot have same parameters"
                        },
                        status=status.HTTP_400_BAD_REQUEST,
                    )
                else:
                    return self.paginate(
                        request=request,
                        order_by=order_by_param,
                        queryset=issue_queryset,
                        on_results=lambda issues: issue_on_results(
                            group_by=group_by,
                            issues=issues,
                            sub_group_by=sub_group_by,
                        ),
                        paginator_cls=SubGroupedOffsetPaginator,
                        group_by_fields=issue_group_values(
                            field=group_by,
                            slug=slug,
                            filters=filters,
                        ),
                        sub_group_by_fields=issue_group_values(
                            field=sub_group_by,
                            slug=slug,
                            filters=filters,
                        ),
                        group_by_field_name=group_by,
                        sub_group_by_field_name=sub_group_by,
                        count_filter=Q(
                            Q(issue_inbox__status=1)
                            | Q(issue_inbox__status=-1)
                            | Q(issue_inbox__status=2)
                            | Q(issue_inbox__isnull=True),
                            archived_at__isnull=True,
                            is_draft=False,
                        ),
                    )
            else:
                # Group paginate
                return self.paginate(
                    request=request,
                    order_by=order_by_param,
                    queryset=issue_queryset,
                    on_results=lambda issues: issue_on_results(
                        group_by=group_by,
                        issues=issues,
                        sub_group_by=sub_group_by,
                    ),
                    paginator_cls=GroupedOffsetPaginator,
                    group_by_fields=issue_group_values(
                        field=group_by,
                        slug=slug,
                        filters=filters,
                    ),
                    group_by_field_name=group_by,
                    count_filter=Q(
                        Q(issue_inbox__status=1)
                        | Q(issue_inbox__status=-1)
                        | Q(issue_inbox__status=2)
                        | Q(issue_inbox__isnull=True),
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                )
            ).order_by("state_order")
        # assignee and label ordering
        elif order_by_param in [
            "labels__name",
            "-labels__name",
            "assignees__first_name",
            "-assignees__first_name",
        ]:
            issue_queryset = issue_queryset.annotate(
                max_values=Max(
                    order_by_param[1::]
                    if order_by_param.startswith("-")
                    else order_by_param
                )
            ).order_by(
                "-max_values"
                if order_by_param.startswith("-")
                else "max_values"
            )
        else:
            issue_queryset = issue_queryset.order_by(order_by_param)

        issues = IssueSerializer(
            issue_queryset, many=True, fields=fields if fields else None
        ).data
        return Response(issues, status=status.HTTP_200_OK)
        return self.paginate(
            order_by=order_by_param,
            request=request,
            queryset=issue_queryset,
            on_results=lambda issues: issue_on_results(
                group_by=group_by, issues=issues, sub_group_by=sub_group_by
            ),
        )


class WorkspaceUserPropertiesEndpoint(BaseAPIView):
@@ -397,6 +391,7 @@ class WorkspaceUserActivityEndpoint(BaseAPIView):
        queryset = queryset.filter(project__in=projects)

        return self.paginate(
            order_by=request.GET.get("order_by", "-created_at"),
            request=request,
            queryset=queryset,
            on_results=lambda issue_activities: IssueActivitySerializer(
@@ -4,6 +4,8 @@ import uuid

# Django imports
from django.utils import timezone
from django.core.validators import validate_email
from django.core.exceptions import ValidationError

# Third party imports
from zxcvbn import zxcvbn
@@ -46,68 +48,71 @@ class Adapter:
    def authenticate(self):
        raise NotImplementedError

    def complete_login_or_signup(self):
        email = self.user_data.get("email")
        user = User.objects.filter(email=email).first()
        # Check if sign up case or login
        is_signup = bool(user)
        if not user:
            # New user
            (ENABLE_SIGNUP,) = get_configuration_value(
                [
                    {
                        "key": "ENABLE_SIGNUP",
                        "default": os.environ.get("ENABLE_SIGNUP", "1"),
                    },
                ]
            )
            if (
                ENABLE_SIGNUP == "0"
                and not WorkspaceMemberInvite.objects.filter(
                    email=email,
                ).exists()
            ):
                raise AuthenticationException(
                    error_code=AUTHENTICATION_ERROR_CODES["SIGNUP_DISABLED"],
                    error_message="SIGNUP_DISABLED",
                    payload={"email": email},
                )
            user = User(email=email, username=uuid.uuid4().hex)

            if self.user_data.get("user").get("is_password_autoset"):
                user.set_password(uuid.uuid4().hex)
                user.is_password_autoset = True
                user.is_email_verified = True
            else:
                # Validate password
                results = zxcvbn(self.code)
                if results["score"] < 3:
                    raise AuthenticationException(
                        error_code=AUTHENTICATION_ERROR_CODES[
                            "INVALID_PASSWORD"
                        ],
                        error_message="INVALID_PASSWORD",
                        payload={"email": email},
                    )

                user.set_password(self.code)
                user.is_password_autoset = False

            avatar = self.user_data.get("user", {}).get("avatar", "")
            first_name = self.user_data.get("user", {}).get("first_name", "")
            last_name = self.user_data.get("user", {}).get("last_name", "")
            user.avatar = avatar if avatar else ""
            user.first_name = first_name if first_name else ""
            user.last_name = last_name if last_name else ""
            user.save()
            Profile.objects.create(user=user)

        if not user.is_active:
    def sanitize_email(self, email):
        # Check if email is present
        if not email:
            raise AuthenticationException(
                AUTHENTICATION_ERROR_CODES["USER_ACCOUNT_DEACTIVATED"],
                error_message="USER_ACCOUNT_DEACTIVATED",
                error_code=AUTHENTICATION_ERROR_CODES["INVALID_EMAIL"],
                error_message="INVALID_EMAIL",
                payload={"email": email},
            )

        # Sanitize email
        email = str(email).lower().strip()

        # validate email
        try:
            validate_email(email)
        except ValidationError:
            raise AuthenticationException(
                error_code=AUTHENTICATION_ERROR_CODES["INVALID_EMAIL"],
                error_message="INVALID_EMAIL",
                payload={"email": email},
            )
        # Return email
        return email

    def validate_password(self, email):
        """Validate password strength"""
        results = zxcvbn(self.code)
        if results["score"] < 3:
            raise AuthenticationException(
                error_code=AUTHENTICATION_ERROR_CODES["INVALID_PASSWORD"],
                error_message="INVALID_PASSWORD",
                payload={"email": email},
            )
        return

    def __check_signup(self, email):
        """Check if sign up is enabled or not and raise exception if not enabled"""

        # Get configuration value
        (ENABLE_SIGNUP,) = get_configuration_value(
            [
                {
                    "key": "ENABLE_SIGNUP",
                    "default": os.environ.get("ENABLE_SIGNUP", "1"),
                },
            ]
        )

        # Check if sign up is disabled and invite is present or not
        if (
            ENABLE_SIGNUP == "0"
            and not WorkspaceMemberInvite.objects.filter(
                email=email,
            ).exists()
        ):
            # Raise exception
            raise AuthenticationException(
                error_code=AUTHENTICATION_ERROR_CODES["SIGNUP_DISABLED"],
                error_message="SIGNUP_DISABLED",
                payload={"email": email},
            )

        return True

    def save_user_data(self, user):
        # Update user details
        user.last_login_medium = self.provider
        user.last_active = timezone.now()
@@ -116,7 +121,63 @@ class Adapter:
        user.last_login_uagent = self.request.META.get("HTTP_USER_AGENT")
        user.token_updated_at = timezone.now()
        user.save()
        return user

    def complete_login_or_signup(self):
        # Get email
        email = self.user_data.get("email")

        # Sanitize email
        email = self.sanitize_email(email)

        # Check if the user is present
        user = User.objects.filter(email=email).first()
        # Check if sign up case or login
        is_signup = bool(user)
        # If user is not present, create a new user
        if not user:
            # New user
            self.__check_signup(email)

            # Initialize user
            user = User(email=email, username=uuid.uuid4().hex)

            # Check if password is autoset
            if self.user_data.get("user").get("is_password_autoset"):
                user.set_password(uuid.uuid4().hex)
                user.is_password_autoset = True
                user.is_email_verified = True

            # Validate password
            else:
                # Validate password
                self.validate_password(email)
                # Set password
                user.set_password(self.code)
                user.is_password_autoset = False

            # Set user details
            avatar = self.user_data.get("user", {}).get("avatar", "")
            first_name = self.user_data.get("user", {}).get("first_name", "")
            last_name = self.user_data.get("user", {}).get("last_name", "")
            user.avatar = avatar if avatar else ""
            user.first_name = first_name if first_name else ""
            user.last_name = last_name if last_name else ""
            user.save()

            # Create profile
            Profile.objects.create(user=user)

        if not user.is_active:
            raise AuthenticationException(
                AUTHENTICATION_ERROR_CODES["USER_ACCOUNT_DEACTIVATED"],
                error_message="USER_ACCOUNT_DEACTIVATED",
            )

        # Save user data
        user = self.save_user_data(user=user)

        # Call callback if present
        if self.callback:
            self.callback(
                user,
@@ -124,7 +185,9 @@ class Adapter:
                self.request,
            )

        # Create or update account if token data is present
        if self.token_data:
            self.create_update_account(user=user)

        # Return user
        return user
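The refactor above splits the old monolithic complete_login_or_signup into sanitize_email, validate_password, __check_signup, and save_user_data. A standalone sketch of the sanitize_email flow; the regex is a rough stand-in for Django's validate_email, not the validator the code actually uses:

import re

EMAIL_RE = re.compile(r"^[^@\s]+@[^@\s]+\.[^@\s]+$")  # simplified format check

def sanitize_email(email):
    # Presence check, then lowercase + strip, then format validation,
    # mirroring the order of the new Adapter.sanitize_email.
    if not email:
        raise ValueError("INVALID_EMAIL")
    email = str(email).lower().strip()
    if not EMAIL_RE.match(email):
        raise ValueError("INVALID_EMAIL")
    return email

print(sanitize_email("  User@Example.COM "))  # -> user@example.com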
@@ -58,6 +58,8 @@ AUTHENTICATION_ERROR_CODES = {
    "ADMIN_USER_DEACTIVATED": 5190,
    # Rate limit
    "RATE_LIMIT_EXCEEDED": 5900,
    # Unknown
    "AUTHENTICATION_FAILED": 5999,
}


@@ -81,11 +81,11 @@ class OauthAdapter(Adapter):
            response.raise_for_status()
            return response.json()
        except requests.RequestException:
            code = (
                "GOOGLE_OAUTH_PROVIDER_ERROR"
                if self.provider == "google"
                else "GITHUB_OAUTH_PROVIDER_ERROR"
            )
            if self.provider == "google":
                code = "GOOGLE_OAUTH_PROVIDER_ERROR"
            if self.provider == "github":
                code = "GITHUB_OAUTH_PROVIDER_ERROR"

            raise AuthenticationException(
                error_code=AUTHENTICATION_ERROR_CODES[code],
                error_message=str(code),

@@ -4,6 +4,7 @@ from plane.db.models import (
    WorkspaceMember,
    WorkspaceMemberInvite,
)
from plane.utils.cache import invalidate_cache_directly


def process_workspace_project_invitations(user):
@@ -26,6 +27,16 @@ def process_workspace_project_invitations(user):
        ignore_conflicts=True,
    )

    [
        invalidate_cache_directly(
            path=f"/api/workspaces/{str(workspace_member_invite.workspace.slug)}/members/",
            url_params=False,
            user=False,
            multiple=True,
        )
        for workspace_member_invite in workspace_member_invites
    ]

    # Check if user has any project invites
    project_member_invites = ProjectMemberInvite.objects.filter(
        email=user.email, accepted=True

@@ -28,6 +28,7 @@ from plane.db.models import (
    Project,
    State,
    User,
    EstimatePoint,
)
from plane.settings.redis import redis_instance
from plane.utils.exception_logger import log_exception
@@ -448,21 +449,37 @@ def track_estimate_points(
    if current_instance.get("estimate_point") != requested_data.get(
        "estimate_point"
    ):
        old_estimate = (
            EstimatePoint.objects.filter(
                pk=current_instance.get("estimate_point")
            ).first()
            if current_instance.get("estimate_point") is not None
            else None
        )
        new_estimate = (
            EstimatePoint.objects.filter(
                pk=requested_data.get("estimate_point")
            ).first()
            if requested_data.get("estimate_point") is not None
            else None
        )
        issue_activities.append(
            IssueActivity(
                issue_id=issue_id,
                actor_id=actor_id,
                verb="updated",
                old_value=(
                old_identifier=(
                    current_instance.get("estimate_point")
                    if current_instance.get("estimate_point") is not None
                    else ""
                    else None
                ),
                new_value=(
                new_identifier=(
                    requested_data.get("estimate_point")
                    if requested_data.get("estimate_point") is not None
                    else ""
                    else None
                ),
                old_value=old_estimate.value if old_estimate else None,
                new_value=new_estimate.value if new_estimate else None,
                field="estimate_point",
                project_id=project_id,
                workspace_id=workspace_id,
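The hunk above changes what the activity record carries: the EstimatePoint primary keys now go into old/new_identifier while the human-readable values go into old/new_value. A small sketch of the resulting payload shape; old_point and new_point are dicts standing in for EstimatePoint rows:

def build_estimate_activity(old_point, new_point):
    # Identifiers hold the EstimatePoint ids; values hold display strings.
    return {
        "field": "estimate_point",
        "old_identifier": old_point["id"] if old_point else None,
        "new_identifier": new_point["id"] if new_point else None,
        "old_value": old_point["value"] if old_point else None,
        "new_value": new_point["value"] if new_point else None,
    }

print(build_estimate_activity({"id": 7, "value": "3"}, {"id": 9, "value": "5"}))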
@@ -5,6 +5,7 @@ import logging
from celery import shared_task

# Django imports
# Third party imports
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import render_to_string
from django.utils.html import strip_tags
260 apiserver/plane/db/migrations/0067_issue_estimate.py Normal file
@@ -0,0 +1,260 @@
# # Generated by Django 4.2.7 on 2024-05-24 09:47
# Python imports
import uuid
from uuid import uuid4
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
import plane.db.models.deploy_board


def issue_estimate_point(apps, schema_editor):
    Issue = apps.get_model("db", "Issue")
    Project = apps.get_model("db", "Project")
    EstimatePoint = apps.get_model("db", "EstimatePoint")
    IssueActivity = apps.get_model("db", "IssueActivity")
    updated_estimate_point = []
    updated_issue_activity = []

    # loop through all the projects
    for project in Project.objects.filter(estimate__isnull=False):
        estimate_points = EstimatePoint.objects.filter(
            estimate=project.estimate, project=project
        )

        for issue_activity in IssueActivity.objects.filter(
            field="estimate_point", project=project
        ):
            if issue_activity.new_value:
                new_identifier = estimate_points.filter(
                    key=issue_activity.new_value
                ).first().id
                issue_activity.new_identifier = new_identifier
                new_value = estimate_points.filter(
                    key=issue_activity.new_value
                ).first().value
                issue_activity.new_value = new_value

            if issue_activity.old_value:
                old_identifier = estimate_points.filter(
                    key=issue_activity.old_value
                ).first().id
                issue_activity.old_identifier = old_identifier
                old_value = estimate_points.filter(
                    key=issue_activity.old_value
                ).first().value
                issue_activity.old_value = old_value
            updated_issue_activity.append(issue_activity)

        for issue in Issue.objects.filter(
            point__isnull=False, project=project
        ):
            # get the estimate id for the corresponding estimate point in the issue
            estimate = estimate_points.filter(key=issue.point).first()
            issue.estimate_point = estimate
            updated_estimate_point.append(issue)

    Issue.objects.bulk_update(
        updated_estimate_point, ["estimate_point"], batch_size=1000
    )
    IssueActivity.objects.bulk_update(
        updated_issue_activity,
        ["new_value", "old_value", "new_identifier", "old_identifier"],
        batch_size=1000,
    )


def last_used_estimate(apps, schema_editor):
    Project = apps.get_model("db", "Project")
    Estimate = apps.get_model("db", "Estimate")

    # Get all estimate ids used in projects
    estimate_ids = Project.objects.filter(estimate__isnull=False).values_list(
        "estimate", flat=True
    )

    # Update all matching estimates
    Estimate.objects.filter(id__in=estimate_ids).update(last_used=True)


def populate_deploy_board(apps, schema_editor):
    DeployBoard = apps.get_model("db", "DeployBoard")
    ProjectDeployBoard = apps.get_model("db", "ProjectDeployBoard")

    DeployBoard.objects.bulk_create(
        [
            DeployBoard(
                entity_identifier=deploy_board.project_id,
                project_id=deploy_board.project_id,
                entity_name="project",
                anchor=uuid4().hex,
                is_comments_enabled=deploy_board.comments,
                is_reactions_enabled=deploy_board.reactions,
                inbox=deploy_board.inbox,
                is_votes_enabled=deploy_board.votes,
                view_props=deploy_board.views,
                workspace_id=deploy_board.workspace_id,
                created_at=deploy_board.created_at,
                updated_at=deploy_board.updated_at,
                created_by_id=deploy_board.created_by_id,
                updated_by_id=deploy_board.updated_by_id,
            )
            for deploy_board in ProjectDeployBoard.objects.all()
        ],
        batch_size=100,
    )


class Migration(migrations.Migration):

    dependencies = [
        ("db", "0066_account_id_token_cycle_logo_props_module_logo_props"),
    ]

    operations = [
        migrations.CreateModel(
            name="DeployBoard",
            fields=[
                (
                    "created_at",
                    models.DateTimeField(
                        auto_now_add=True, verbose_name="Created At"
                    ),
                ),
                (
                    "updated_at",
                    models.DateTimeField(
                        auto_now=True, verbose_name="Last Modified At"
                    ),
                ),
                (
                    "id",
                    models.UUIDField(
                        db_index=True,
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                        unique=True,
                    ),
                ),
                ("entity_identifier", models.UUIDField(null=True)),
                (
                    "entity_name",
                    models.CharField(
                        choices=[
                            ("project", "Project"),
                            ("issue", "Issue"),
                            ("module", "Module"),
                            ("cycle", "Task"),
                            ("page", "Page"),
                            ("view", "View"),
                        ],
                        max_length=30,
                    ),
                ),
                (
                    "anchor",
                    models.CharField(
                        db_index=True,
                        default=plane.db.models.deploy_board.get_anchor,
                        max_length=255,
                        unique=True,
                    ),
                ),
                ("is_comments_enabled", models.BooleanField(default=False)),
                ("is_reactions_enabled", models.BooleanField(default=False)),
                ("is_votes_enabled", models.BooleanField(default=False)),
                ("view_props", models.JSONField(default=dict)),
                (
                    "created_by",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="%(class)s_created_by",
                        to=settings.AUTH_USER_MODEL,
                        verbose_name="Created By",
                    ),
                ),
                (
                    "inbox",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="board_inbox",
                        to="db.inbox",
                    ),
                ),
                (
                    "project",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="project_%(class)s",
                        to="db.project",
                    ),
                ),
                (
                    "updated_by",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="%(class)s_updated_by",
                        to=settings.AUTH_USER_MODEL,
                        verbose_name="Last Modified By",
                    ),
                ),
                (
                    "workspace",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="workspace_%(class)s",
                        to="db.workspace",
                    ),
                ),
            ],
            options={
                "verbose_name": "Deploy Board",
                "verbose_name_plural": "Deploy Boards",
                "db_table": "deploy_boards",
                "ordering": ("-created_at",),
                "unique_together": {("entity_name", "entity_identifier")},
            },
        ),
        migrations.AddField(
            model_name="estimate",
            name="last_used",
            field=models.BooleanField(default=False),
        ),
        # Rename the existing field
        migrations.RenameField(
            model_name="issue",
            old_name="estimate_point",
            new_name="point",
        ),
        # Add a new field with the original name as a foreign key
        migrations.AddField(
            model_name="issue",
            name="estimate_point",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="issue_estimates",
                to="db.EstimatePoint",
                blank=True,
                null=True,
            ),
        ),
        migrations.AlterField(
            model_name="estimate",
            name="type",
            field=models.CharField(default="categories", max_length=255),
        ),
        migrations.AlterField(
            model_name="estimatepoint",
            name="value",
            field=models.CharField(max_length=255),
        ),
        migrations.RunPython(issue_estimate_point),
        migrations.RunPython(last_used_estimate),
        migrations.RunPython(populate_deploy_board),
    ]
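The data migration above swaps each integer point on an issue for the EstimatePoint row whose key matches it. An in-memory picture of that remapping (plain Python, illustrative only):

def remap_points(issue_points, estimate_points):
    # issue_points: {issue_id: integer point}; estimate_points: dicts with
    # id/key/value, standing in for EstimatePoint rows of one project.
    by_key = {ep["key"]: ep for ep in estimate_points}
    return {issue_id: by_key.get(point) for issue_id, point in issue_points.items()}

estimate_points = [
    {"id": "ep-1", "key": 1, "value": "1"},
    {"id": "ep-2", "key": 2, "value": "2"},
]
print(remap_points({"issue-a": 2}, estimate_points))  # issue-a -> ep-2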
@@ -4,6 +4,7 @@ from .asset import FileAsset
from .base import BaseModel
from .cycle import Cycle, CycleFavorite, CycleIssue, CycleUserProperties
from .dashboard import Dashboard, DashboardWidget, Widget
from .deploy_board import DeployBoard
from .estimate import Estimate, EstimatePoint
from .exporter import ExporterHistory
from .importer import Importer
@@ -53,13 +54,13 @@ from .page import Page, PageFavorite, PageLabel, PageLog
from .project import (
    Project,
    ProjectBaseModel,
    ProjectDeployBoard,
    ProjectFavorite,
    ProjectIdentifier,
    ProjectMember,
    ProjectMemberInvite,
    ProjectPublicMember,
)
from .deploy_board import DeployBoard
from .session import Session
from .social_connection import SocialLoginConnection
from .state import State

@@ -12,6 +12,7 @@ from .base import BaseModel


def get_upload_path(instance, filename):
    filename = filename[:50]
    if instance.workspace_id is not None:
        return f"{instance.workspace.id}/{uuid4().hex}-{filename}"
    return f"user-{uuid4().hex}-{filename}"
53 apiserver/plane/db/models/deploy_board.py Normal file
@@ -0,0 +1,53 @@
# Python imports
from uuid import uuid4

# Django imports
from django.db import models

# Module imports
from .workspace import WorkspaceBaseModel


def get_anchor():
    return uuid4().hex


class DeployBoard(WorkspaceBaseModel):
    TYPE_CHOICES = (
        ("project", "Project"),
        ("issue", "Issue"),
        ("module", "Module"),
        ("cycle", "Task"),
        ("page", "Page"),
        ("view", "View"),
    )

    entity_identifier = models.UUIDField(null=True)
    entity_name = models.CharField(
        max_length=30,
        choices=TYPE_CHOICES,
    )
    anchor = models.CharField(
        max_length=255, default=get_anchor, unique=True, db_index=True
    )
    is_comments_enabled = models.BooleanField(default=False)
    is_reactions_enabled = models.BooleanField(default=False)
    inbox = models.ForeignKey(
        "db.Inbox",
        related_name="board_inbox",
        on_delete=models.SET_NULL,
        null=True,
    )
    is_votes_enabled = models.BooleanField(default=False)
    view_props = models.JSONField(default=dict)

    def __str__(self):
        """Return name of the deploy board"""
        return f"{self.entity_identifier} <{self.entity_name}>"

    class Meta:
        unique_together = ["entity_name", "entity_identifier"]
        verbose_name = "Deploy Board"
        verbose_name_plural = "Deploy Boards"
        db_table = "deploy_boards"
        ordering = ("-created_at",)
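The anchor field above replaces the old slug/project-id anchors with an unguessable token. A minimal sketch of the default it uses (the DB-level unique constraint provides the actual uniqueness guarantee):

from uuid import uuid4

def get_anchor():
    # Same default the DeployBoard.anchor field declares:
    # a random 32-character hex slug for the public URL.
    return uuid4().hex

print(get_anchor())  # e.g. "3f2a9c1d..." (different every call)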
@@ -11,7 +11,8 @@ class Estimate(ProjectBaseModel):
    description = models.TextField(
        verbose_name="Estimate Description", blank=True
    )
    type = models.CharField(max_length=255, default="Categories")
    type = models.CharField(max_length=255, default="categories")
    last_used = models.BooleanField(default=False)

    def __str__(self):
        """Return name of the estimate"""
@@ -35,7 +36,7 @@ class EstimatePoint(ProjectBaseModel):
        default=0, validators=[MinValueValidator(0), MaxValueValidator(12)]
    )
    description = models.TextField(blank=True)
    value = models.CharField(max_length=20)
    value = models.CharField(max_length=255)

    def __str__(self):
        """Return name of the estimate"""

@@ -119,11 +119,18 @@ class Issue(ProjectBaseModel):
        blank=True,
        related_name="state_issue",
    )
    estimate_point = models.IntegerField(
    point = models.IntegerField(
        validators=[MinValueValidator(0), MaxValueValidator(12)],
        null=True,
        blank=True,
    )
    estimate_point = models.ForeignKey(
        "db.EstimatePoint",
        on_delete=models.SET_NULL,
        related_name="issue_estimates",
        null=True,
        blank=True,
    )
    name = models.CharField(max_length=255, verbose_name="Issue Name")
    description = models.JSONField(blank=True, default=dict)
    description_html = models.TextField(blank=True, default="<p></p>")

@@ -260,6 +260,8 @@ def get_default_views():
    }


# DEPRECATED TODO:
# used to get the old anchors for the project deploy boards
class ProjectDeployBoard(ProjectBaseModel):
    anchor = models.CharField(
        max_length=255, default=get_anchor, unique=True, db_index=True

@@ -13,12 +13,9 @@ class InstanceSerializer(BaseSerializer):
        model = Instance
        exclude = [
            "license_key",
            "api_key",
            "version",
        ]
        read_only_fields = [
            "id",
            "instance_id",
            "email",
            "last_checked_at",
            "is_setup_done",

@@ -49,8 +49,8 @@ class Command(BaseCommand):
                instance_name="Plane Community Edition",
                instance_id=secrets.token_hex(12),
                license_key=None,
                api_key=secrets.token_hex(8),
                version=payload.get("version"),
                current_version=payload.get("version"),
                latest_version=payload.get("version"),
                last_checked_at=timezone.now(),
                user_count=payload.get("user_count", 0),
            )
@@ -0,0 +1,106 @@
# Generated by Django 4.2.11 on 2024-05-31 10:46

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("license", "0001_initial"),
    ]

    operations = [
        migrations.AlterField(
            model_name="instance",
            name="instance_id",
            field=models.CharField(max_length=255, unique=True),
        ),
        migrations.RenameField(
            model_name="instance",
            old_name="version",
            new_name="current_version",
        ),
        migrations.RemoveField(
            model_name="instance",
            name="api_key",
        ),
        migrations.AddField(
            model_name="instance",
            name="domain",
            field=models.TextField(blank=True),
        ),
        migrations.AddField(
            model_name="instance",
            name="latest_version",
            field=models.CharField(blank=True, max_length=10, null=True),
        ),
        migrations.AddField(
            model_name="instance",
            name="product",
            field=models.CharField(default="plane-ce", max_length=50),
        ),
        migrations.CreateModel(
            name="ChangeLog",
            fields=[
                (
                    "created_at",
                    models.DateTimeField(
                        auto_now_add=True, verbose_name="Created At"
                    ),
                ),
                (
                    "updated_at",
                    models.DateTimeField(
                        auto_now=True, verbose_name="Last Modified At"
                    ),
                ),
                (
                    "id",
                    models.UUIDField(
                        db_index=True,
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                        unique=True,
                    ),
                ),
                ("title", models.CharField(max_length=100)),
                ("description", models.TextField(blank=True)),
                ("version", models.CharField(max_length=100)),
                ("tags", models.JSONField(default=list)),
                ("release_date", models.DateTimeField(null=True)),
                ("is_release_candidate", models.BooleanField(default=False)),
                (
                    "created_by",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="%(class)s_created_by",
                        to=settings.AUTH_USER_MODEL,
                        verbose_name="Created By",
                    ),
                ),
                (
                    "updated_by",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="%(class)s_updated_by",
                        to=settings.AUTH_USER_MODEL,
                        verbose_name="Last Modified By",
                    ),
                ),
            ],
            options={
                "verbose_name": "Change Log",
                "verbose_name_plural": "Change Logs",
                "db_table": "changelogs",
                "ordering": ("-created_at",),
            },
        ),
    ]
@@ -0,0 +1,43 @@
# Generated by Django 4.2.11 on 2024-06-05 13:41

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("license", "0002_rename_version_instance_current_version_and_more"),
    ]

    operations = [
        migrations.AlterField(
            model_name="changelog",
            name="title",
            field=models.CharField(max_length=255),
        ),
        migrations.AlterField(
            model_name="changelog",
            name="version",
            field=models.CharField(max_length=255),
        ),
        migrations.AlterField(
            model_name="instance",
            name="current_version",
            field=models.CharField(max_length=255),
        ),
        migrations.AlterField(
            model_name="instance",
            name="latest_version",
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name="instance",
            name="namespace",
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name="instance",
            name="product",
            field=models.CharField(default="plane-ce", max_length=255),
        ),
    ]
@@ -1,3 +1,6 @@
# Python imports
from enum import Enum

# Django imports
from django.db import models
from django.conf import settings
@@ -8,17 +11,25 @@ from plane.db.models import BaseModel
ROLE_CHOICES = ((20, "Admin"),)


class ProductTypes(Enum):
    PLANE_CE = "plane-ce"


class Instance(BaseModel):
    # General informations
    # General information
    instance_name = models.CharField(max_length=255)
    whitelist_emails = models.TextField(blank=True, null=True)
    instance_id = models.CharField(max_length=25, unique=True)
    instance_id = models.CharField(max_length=255, unique=True)
    license_key = models.CharField(max_length=256, null=True, blank=True)
    api_key = models.CharField(max_length=16)
    version = models.CharField(max_length=10)
    # Instnace specifics
    current_version = models.CharField(max_length=255)
    latest_version = models.CharField(max_length=255, null=True, blank=True)
    product = models.CharField(
        max_length=255, default=ProductTypes.PLANE_CE.value
    )
    domain = models.TextField(blank=True)
    # Instance specifics
    last_checked_at = models.DateTimeField()
    namespace = models.CharField(max_length=50, blank=True, null=True)
    namespace = models.CharField(max_length=255, blank=True, null=True)
    # telemetry and support
    is_telemetry_enabled = models.BooleanField(default=True)
    is_support_required = models.BooleanField(default=True)
@@ -70,3 +81,20 @@ class InstanceConfiguration(BaseModel):
        verbose_name_plural = "Instance Configurations"
        db_table = "instance_configurations"
        ordering = ("-created_at",)


class ChangeLog(BaseModel):
    """Change Log model to store the release changelogs made in the application."""

    title = models.CharField(max_length=255)
    description = models.TextField(blank=True)
    version = models.CharField(max_length=255)
    tags = models.JSONField(default=list)
    release_date = models.DateTimeField(null=True)
    is_release_candidate = models.BooleanField(default=False)

    class Meta:
        verbose_name = "Change Log"
        verbose_name_plural = "Change Logs"
        db_table = "changelogs"
        ordering = ("-created_at",)
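One design note on the product field above: the model stores the enum's string value rather than the enum member, which keeps the column a plain varchar. A minimal sketch:

from enum import Enum

class ProductTypes(Enum):
    PLANE_CE = "plane-ce"

# The CharField default is the .value, so the database sees "plane-ce".
default_product = ProductTypes.PLANE_CE.value
print(default_product)  # "plane-ce"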
@@ -225,6 +225,9 @@ DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"

# Storage Settings
# Use Minio settings
USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1

STORAGES = {
    "staticfiles": {
        "BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
@@ -243,7 +246,7 @@ AWS_S3_FILE_OVERWRITE = False
AWS_S3_ENDPOINT_URL = os.environ.get(
    "AWS_S3_ENDPOINT_URL", None
) or os.environ.get("MINIO_ENDPOINT_URL", None)
if AWS_S3_ENDPOINT_URL:
if AWS_S3_ENDPOINT_URL and USE_MINIO:
    parsed_url = urlparse(os.environ.get("WEB_URL", "http://localhost"))
    AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_STORAGE_BUCKET_NAME}"
    AWS_S3_URL_PROTOCOL = f"{parsed_url.scheme}:"
@@ -307,8 +310,6 @@ GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False)
|
||||
ANALYTICS_SECRET_KEY = os.environ.get("ANALYTICS_SECRET_KEY", False)
|
||||
ANALYTICS_BASE_API = os.environ.get("ANALYTICS_BASE_API", False)
|
||||
|
||||
# Use Minio settings
|
||||
USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1
|
||||
|
||||
# Posthog settings
|
||||
POSTHOG_API_KEY = os.environ.get("POSTHOG_API_KEY", False)
|
||||
@@ -350,4 +351,4 @@ CSRF_FAILURE_VIEW = "plane.authentication.views.common.csrf_failure"
|
||||
# Base URLs
|
||||
ADMIN_BASE_URL = os.environ.get("ADMIN_BASE_URL", None)
|
||||
SPACE_BASE_URL = os.environ.get("SPACE_BASE_URL", None)
|
||||
APP_BASE_URL = os.environ.get("APP_BASE_URL") or os.environ.get("WEB_URL")
|
||||
APP_BASE_URL = os.environ.get("APP_BASE_URL")
|
||||
|
||||
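The settings hunks above move the USE_MINIO flag ahead of the endpoint check, so the custom-domain rewrite only applies when MinIO is actually in use. A hedged sketch of the resulting gate (the bucket name is a stand-in, not from the diff):

import os
from urllib.parse import urlparse

USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1
endpoint = os.environ.get("AWS_S3_ENDPOINT_URL") or os.environ.get("MINIO_ENDPOINT_URL")
if endpoint and USE_MINIO:
    # Serve uploads from the app's own host: <web-host>/<bucket>
    parsed = urlparse(os.environ.get("WEB_URL", "http://localhost"))
    custom_domain = f"{parsed.netloc}/my-bucket"  # bucket name assumed
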
@@ -10,7 +10,7 @@ from plane.space.views import (

urlpatterns = [
    path(
        "workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
        "anchor/<str:anchor>/inboxes/<uuid:inbox_id>/inbox-issues/",
        InboxIssuePublicViewSet.as_view(
            {
                "get": "list",
@@ -20,7 +20,7 @@ urlpatterns = [
        name="inbox-issue",
    ),
    path(
        "workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
        "anchor/<str:anchor>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
        InboxIssuePublicViewSet.as_view(
            {
                "get": "retrieve",
@@ -31,7 +31,7 @@ urlpatterns = [
        name="inbox-issue",
    ),
    path(
        "workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/votes/",
        "anchor/<str:anchor>/issues/<uuid:issue_id>/votes/",
        IssueVotePublicViewSet.as_view(
            {
                "get": "list",

@@ -10,12 +10,12 @@ from plane.space.views import (

urlpatterns = [
    path(
        "workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/",
        "anchor/<str:anchor>/issues/<uuid:issue_id>/",
        IssueRetrievePublicEndpoint.as_view(),
        name="workspace-project-boards",
    ),
    path(
        "workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
        "anchor/<str:anchor>/issues/<uuid:issue_id>/comments/",
        IssueCommentPublicViewSet.as_view(
            {
                "get": "list",
@@ -25,7 +25,7 @@ urlpatterns = [
        name="issue-comments-project-board",
    ),
    path(
        "workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
        "anchor/<str:anchor>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
        IssueCommentPublicViewSet.as_view(
            {
                "get": "retrieve",
@@ -36,7 +36,7 @@ urlpatterns = [
        name="issue-comments-project-board",
    ),
    path(
        "workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
        "anchor/<str:anchor>/issues/<uuid:issue_id>/reactions/",
        IssueReactionPublicViewSet.as_view(
            {
                "get": "list",
@@ -46,7 +46,7 @@ urlpatterns = [
        name="issue-reactions-project-board",
    ),
    path(
        "workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
        "anchor/<str:anchor>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
        IssueReactionPublicViewSet.as_view(
            {
                "delete": "destroy",
@@ -55,7 +55,7 @@ urlpatterns = [
        name="issue-reactions-project-board",
    ),
    path(
        "workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
        "anchor/<str:anchor>/comments/<uuid:comment_id>/reactions/",
        CommentReactionPublicViewSet.as_view(
            {
                "get": "list",
@@ -65,7 +65,7 @@ urlpatterns = [
        name="comment-reactions-project-board",
    ),
    path(
        "workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
        "anchor/<str:anchor>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
        CommentReactionPublicViewSet.as_view(
            {
                "delete": "destroy",

@@ -4,17 +4,23 @@ from django.urls import path
from plane.space.views import (
    ProjectDeployBoardPublicSettingsEndpoint,
    ProjectIssuesPublicEndpoint,
    WorkspaceProjectAnchorEndpoint,
)

urlpatterns = [
    path(
        "workspaces/<str:slug>/project-boards/<uuid:project_id>/settings/",
        "anchor/<str:anchor>/settings/",
        ProjectDeployBoardPublicSettingsEndpoint.as_view(),
        name="project-deploy-board-settings",
    ),
    path(
        "workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/",
        "anchor/<str:anchor>/issues/",
        ProjectIssuesPublicEndpoint.as_view(),
        name="project-deploy-board",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/anchor/",
        WorkspaceProjectAnchorEndpoint.as_view(),
        name="project-deploy-board",
    ),
]

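Taken together, the URL hunks above replace the workspace-slug/project-id pair with a single opaque anchor for every public board route. A hypothetical before-and-after request (the anchor value is invented for illustration):

GET /workspaces/acme/project-boards/9b1c.../issues/   (old route)
GET /anchor/4f2a1c9d8b7e/issues/                      (new route)
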
@@ -1,6 +1,7 @@
from .project import (
    ProjectDeployBoardPublicSettingsEndpoint,
    WorkspaceProjectDeployBoardEndpoint,
    WorkspaceProjectAnchorEndpoint,
)

from .issue import (

@@ -18,7 +18,7 @@ from plane.db.models import (
    State,
    IssueLink,
    IssueAttachment,
    ProjectDeployBoard,
    DeployBoard,
)
from plane.app.serializers import (
    IssueSerializer,
@@ -39,7 +39,7 @@ class InboxIssuePublicViewSet(BaseViewSet):
    ]

    def get_queryset(self):
        project_deploy_board = ProjectDeployBoard.objects.get(
        project_deploy_board = DeployBoard.objects.get(
            workspace__slug=self.kwargs.get("slug"),
            project_id=self.kwargs.get("project_id"),
        )
@@ -58,9 +58,9 @@ class InboxIssuePublicViewSet(BaseViewSet):
            )
        return InboxIssue.objects.none()

    def list(self, request, slug, project_id, inbox_id):
        project_deploy_board = ProjectDeployBoard.objects.get(
            workspace__slug=slug, project_id=project_id
    def list(self, request, anchor, inbox_id):
        project_deploy_board = DeployBoard.objects.get(
            anchor=anchor, entity_name="project"
        )
        if project_deploy_board.inbox is None:
            return Response(
@@ -72,8 +72,8 @@ class InboxIssuePublicViewSet(BaseViewSet):
        issues = (
            Issue.objects.filter(
                issue_inbox__inbox_id=inbox_id,
                workspace__slug=slug,
                project_id=project_id,
                workspace_id=project_deploy_board.workspace_id,
                project_id=project_deploy_board.project_id,
            )
            .filter(**filters)
            .annotate(bridge_id=F("issue_inbox__id"))
@@ -117,9 +117,9 @@ class InboxIssuePublicViewSet(BaseViewSet):
            status=status.HTTP_200_OK,
        )

    def create(self, request, slug, project_id, inbox_id):
        project_deploy_board = ProjectDeployBoard.objects.get(
            workspace__slug=slug, project_id=project_id
    def create(self, request, anchor, inbox_id):
        project_deploy_board = DeployBoard.objects.get(
            anchor=anchor, entity_name="project"
        )
        if project_deploy_board.inbox is None:
            return Response(
@@ -151,7 +151,7 @@ class InboxIssuePublicViewSet(BaseViewSet):
            name="Triage",
            group="backlog",
            description="Default state for managing all Inbox Issues",
            project_id=project_id,
            project_id=project_deploy_board.project_id,
            color="#ff7700",
        )

@@ -163,7 +163,7 @@ class InboxIssuePublicViewSet(BaseViewSet):
                "description_html", "<p></p>"
            ),
            priority=request.data.get("issue", {}).get("priority", "low"),
            project_id=project_id,
            project_id=project_deploy_board.project_id,
            state=state,
        )

@@ -173,14 +173,14 @@ class InboxIssuePublicViewSet(BaseViewSet):
            requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
            actor_id=str(request.user.id),
            issue_id=str(issue.id),
            project_id=str(project_id),
            project_id=str(project_deploy_board.project_id),
            current_instance=None,
            epoch=int(timezone.now().timestamp()),
        )
        # create an inbox issue
        InboxIssue.objects.create(
            inbox_id=inbox_id,
            project_id=project_id,
            project_id=project_deploy_board.project_id,
            issue=issue,
            source=request.data.get("source", "in-app"),
        )
@@ -188,9 +188,9 @@ class InboxIssuePublicViewSet(BaseViewSet):
        serializer = IssueStateInboxSerializer(issue)
        return Response(serializer.data, status=status.HTTP_200_OK)

    def partial_update(self, request, slug, project_id, inbox_id, pk):
        project_deploy_board = ProjectDeployBoard.objects.get(
            workspace__slug=slug, project_id=project_id
    def partial_update(self, request, anchor, inbox_id, pk):
        project_deploy_board = DeployBoard.objects.get(
            anchor=anchor, entity_name="project"
        )
        if project_deploy_board.inbox is None:
            return Response(
@@ -200,8 +200,8 @@ class InboxIssuePublicViewSet(BaseViewSet):

        inbox_issue = InboxIssue.objects.get(
            pk=pk,
            workspace__slug=slug,
            project_id=project_id,
            workspace_id=project_deploy_board.workspace_id,
            project_id=project_deploy_board.project_id,
            inbox_id=inbox_id,
        )
        # Get the project member
@@ -216,8 +216,8 @@ class InboxIssuePublicViewSet(BaseViewSet):

        issue = Issue.objects.get(
            pk=inbox_issue.issue_id,
            workspace__slug=slug,
            project_id=project_id,
            workspace_id=project_deploy_board.workspace_id,
            project_id=project_deploy_board.project_id,
        )
        # viewers and guests since only viewers and guests
        issue_data = {
@@ -242,7 +242,7 @@ class InboxIssuePublicViewSet(BaseViewSet):
            requested_data=requested_data,
            actor_id=str(request.user.id),
            issue_id=str(issue.id),
            project_id=str(project_id),
            project_id=str(project_deploy_board.project_id),
            current_instance=json.dumps(
                IssueSerializer(current_instance).data,
                cls=DjangoJSONEncoder,
@@ -255,9 +255,9 @@ class InboxIssuePublicViewSet(BaseViewSet):
                issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST
            )

    def retrieve(self, request, slug, project_id, inbox_id, pk):
        project_deploy_board = ProjectDeployBoard.objects.get(
            workspace__slug=slug, project_id=project_id
    def retrieve(self, request, anchor, inbox_id, pk):
        project_deploy_board = DeployBoard.objects.get(
            anchor=anchor, entity_name="project"
        )
        if project_deploy_board.inbox is None:
            return Response(
@@ -267,21 +267,21 @@ class InboxIssuePublicViewSet(BaseViewSet):

        inbox_issue = InboxIssue.objects.get(
            pk=pk,
            workspace__slug=slug,
            project_id=project_id,
            workspace_id=project_deploy_board.workspace_id,
            project_id=project_deploy_board.project_id,
            inbox_id=inbox_id,
        )
        issue = Issue.objects.get(
            pk=inbox_issue.issue_id,
            workspace__slug=slug,
            project_id=project_id,
            workspace_id=project_deploy_board.workspace_id,
            project_id=project_deploy_board.project_id,
        )
        serializer = IssueStateInboxSerializer(issue)
        return Response(serializer.data, status=status.HTTP_200_OK)

    def destroy(self, request, slug, project_id, inbox_id, pk):
        project_deploy_board = ProjectDeployBoard.objects.get(
            workspace__slug=slug, project_id=project_id
    def destroy(self, request, anchor, inbox_id, pk):
        project_deploy_board = DeployBoard.objects.get(
            anchor=anchor, entity_name="project"
        )
        if project_deploy_board.inbox is None:
            return Response(
@@ -291,8 +291,8 @@ class InboxIssuePublicViewSet(BaseViewSet):

        inbox_issue = InboxIssue.objects.get(
            pk=pk,
            workspace__slug=slug,
            project_id=project_id,
            workspace_id=project_deploy_board.workspace_id,
            project_id=project_deploy_board.project_id,
            inbox_id=inbox_id,
        )

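Every handler in the viewset above repeats the same two-line DeployBoard lookup. A minimal sketch of how that lookup could be factored out, assuming only the model shown in this diff (the helper name is invented, not part of the commit):

def get_project_board(anchor):
    # Resolves the published board once; raises DeployBoard.DoesNotExist
    # exactly like the inline lookups above.
    return DeployBoard.objects.get(anchor=anchor, entity_name="project")
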
@@ -1,56 +1,51 @@
# Python imports
import json

from django.core.serializers.json import DjangoJSONEncoder
from django.db.models import Exists, F, Func, OuterRef, Q, Prefetch

# Django imports
from django.utils import timezone
from django.db.models import (
    Prefetch,
    OuterRef,
    Func,
    F,
    Q,
    Case,
    Value,
    CharField,
    When,
    Exists,
    Max,
    IntegerField,
)
from django.core.serializers.json import DjangoJSONEncoder

# Third Party imports
from rest_framework.response import Response
from rest_framework import status
from rest_framework.permissions import AllowAny, IsAuthenticated

# Module imports
from .base import BaseViewSet, BaseAPIView
from plane.app.serializers import (
    IssueCommentSerializer,
    IssueReactionSerializer,
    CommentReactionSerializer,
    IssueVoteSerializer,
    IssuePublicSerializer,
)
# Third Party imports
from rest_framework.response import Response

from plane.app.serializers import (
    CommentReactionSerializer,
    IssueCommentSerializer,
    IssuePublicSerializer,
    IssueReactionSerializer,
    IssueVoteSerializer,
)
from plane.db.models import (
    Issue,
    IssueComment,
    Label,
    IssueLink,
    IssueAttachment,
    State,
    ProjectMember,
    IssueReaction,
    CommentReaction,
    ProjectDeployBoard,
    DeployBoard,
    IssueVote,
    ProjectPublicMember,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.utils.grouper import group_results
from plane.utils.grouper import (
    issue_group_values,
    issue_on_results,
    issue_queryset_grouper,
)
from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import (
    GroupedOffsetPaginator,
    SubGroupedOffsetPaginator,
)

# Module imports
from .base import BaseAPIView, BaseViewSet


class IssueCommentPublicViewSet(BaseViewSet):
@@ -76,15 +71,15 @@ class IssueCommentPublicViewSet(BaseViewSet):

    def get_queryset(self):
        try:
            project_deploy_board = ProjectDeployBoard.objects.get(
                workspace__slug=self.kwargs.get("slug"),
                project_id=self.kwargs.get("project_id"),
            project_deploy_board = DeployBoard.objects.get(
                anchor=self.kwargs.get("anchor"),
                entity_name="project",
            )
            if project_deploy_board.comments:
            if project_deploy_board.is_comments_enabled:
                return self.filter_queryset(
                    super()
                    .get_queryset()
                    .filter(workspace__slug=self.kwargs.get("slug"))
                    .filter(workspace_id=project_deploy_board.workspace_id)
                    .filter(issue_id=self.kwargs.get("issue_id"))
                    .filter(access="EXTERNAL")
                    .select_related("project")
@@ -93,8 +88,8 @@ class IssueCommentPublicViewSet(BaseViewSet):
                    .annotate(
                        is_member=Exists(
                            ProjectMember.objects.filter(
                                workspace__slug=self.kwargs.get("slug"),
                                project_id=self.kwargs.get("project_id"),
                                workspace_id=project_deploy_board.workspace_id,
                                project_id=project_deploy_board.project_id,
                                member_id=self.request.user.id,
                                is_active=True,
                            )
@@ -103,15 +98,15 @@ class IssueCommentPublicViewSet(BaseViewSet):
                    .distinct()
                ).order_by("created_at")
            return IssueComment.objects.none()
        except ProjectDeployBoard.DoesNotExist:
        except DeployBoard.DoesNotExist:
            return IssueComment.objects.none()

    def create(self, request, slug, project_id, issue_id):
        project_deploy_board = ProjectDeployBoard.objects.get(
            workspace__slug=slug, project_id=project_id
    def create(self, request, anchor, issue_id):
        project_deploy_board = DeployBoard.objects.get(
            anchor=anchor, entity_name="project"
        )

        if not project_deploy_board.comments:
        if not project_deploy_board.is_comments_enabled:
            return Response(
                {"error": "Comments are not enabled for this project"},
                status=status.HTTP_400_BAD_REQUEST,
@@ -120,7 +115,7 @@ class IssueCommentPublicViewSet(BaseViewSet):
        serializer = IssueCommentSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save(
                project_id=project_id,
                project_id=project_deploy_board.project_id,
                issue_id=issue_id,
                actor=request.user,
                access="EXTERNAL",
@@ -132,37 +127,35 @@ class IssueCommentPublicViewSet(BaseViewSet):
                ),
                actor_id=str(request.user.id),
                issue_id=str(issue_id),
                project_id=str(project_id),
                project_id=str(project_deploy_board.project_id),
                current_instance=None,
                epoch=int(timezone.now().timestamp()),
            )
            if not ProjectMember.objects.filter(
                project_id=project_id,
                project_id=project_deploy_board.project_id,
                member=request.user,
                is_active=True,
            ).exists():
                # Add the user for workspace tracking
                _ = ProjectPublicMember.objects.get_or_create(
                    project_id=project_id,
                    project_id=project_deploy_board.project_id,
                    member=request.user,
                )

            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def partial_update(self, request, slug, project_id, issue_id, pk):
        project_deploy_board = ProjectDeployBoard.objects.get(
            workspace__slug=slug, project_id=project_id
    def partial_update(self, request, anchor, issue_id, pk):
        project_deploy_board = DeployBoard.objects.get(
            anchor=anchor, entity_name="project"
        )

        if not project_deploy_board.comments:
        if not project_deploy_board.is_comments_enabled:
            return Response(
                {"error": "Comments are not enabled for this project"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        comment = IssueComment.objects.get(
            workspace__slug=slug, pk=pk, actor=request.user
        )
        comment = IssueComment.objects.get(pk=pk, actor=request.user)
        serializer = IssueCommentSerializer(
            comment, data=request.data, partial=True
        )
@@ -173,7 +166,7 @@ class IssueCommentPublicViewSet(BaseViewSet):
                requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
                actor_id=str(request.user.id),
                issue_id=str(issue_id),
                project_id=str(project_id),
                project_id=str(project_deploy_board.project_id),
                current_instance=json.dumps(
                    IssueCommentSerializer(comment).data,
                    cls=DjangoJSONEncoder,
@@ -183,20 +176,18 @@ class IssueCommentPublicViewSet(BaseViewSet):
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def destroy(self, request, slug, project_id, issue_id, pk):
        project_deploy_board = ProjectDeployBoard.objects.get(
            workspace__slug=slug, project_id=project_id
    def destroy(self, request, anchor, issue_id, pk):
        project_deploy_board = DeployBoard.objects.get(
            anchor=anchor, entity_name="project"
        )

        if not project_deploy_board.comments:
        if not project_deploy_board.is_comments_enabled:
            return Response(
                {"error": "Comments are not enabled for this project"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        comment = IssueComment.objects.get(
            workspace__slug=slug,
            pk=pk,
            project_id=project_id,
            actor=request.user,
        )
        issue_activity.delay(
@@ -204,7 +195,7 @@ class IssueCommentPublicViewSet(BaseViewSet):
            requested_data=json.dumps({"comment_id": str(pk)}),
            actor_id=str(request.user.id),
            issue_id=str(issue_id),
            project_id=str(project_id),
            project_id=str(project_deploy_board.project_id),
            current_instance=json.dumps(
                IssueCommentSerializer(comment).data,
                cls=DjangoJSONEncoder,
@@ -221,11 +212,11 @@ class IssueReactionPublicViewSet(BaseViewSet):

    def get_queryset(self):
        try:
            project_deploy_board = ProjectDeployBoard.objects.get(
            project_deploy_board = DeployBoard.objects.get(
                workspace__slug=self.kwargs.get("slug"),
                project_id=self.kwargs.get("project_id"),
            )
            if project_deploy_board.reactions:
            if project_deploy_board.is_reactions_enabled:
                return (
                    super()
                    .get_queryset()
@@ -236,15 +227,15 @@ class IssueReactionPublicViewSet(BaseViewSet):
                    .distinct()
                )
            return IssueReaction.objects.none()
        except ProjectDeployBoard.DoesNotExist:
        except DeployBoard.DoesNotExist:
            return IssueReaction.objects.none()

    def create(self, request, slug, project_id, issue_id):
        project_deploy_board = ProjectDeployBoard.objects.get(
            workspace__slug=slug, project_id=project_id
    def create(self, request, anchor, issue_id):
        project_deploy_board = DeployBoard.objects.get(
            anchor=anchor, entity_name="project"
        )

        if not project_deploy_board.reactions:
        if not project_deploy_board.is_reactions_enabled:
            return Response(
                {"error": "Reactions are not enabled for this project board"},
                status=status.HTTP_400_BAD_REQUEST,
@@ -253,16 +244,18 @@ class IssueReactionPublicViewSet(BaseViewSet):
        serializer = IssueReactionSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save(
                project_id=project_id, issue_id=issue_id, actor=request.user
                project_id=project_deploy_board.project_id,
                issue_id=issue_id,
                actor=request.user,
            )
            if not ProjectMember.objects.filter(
                project_id=project_id,
                project_id=project_deploy_board.project_id,
                member=request.user,
                is_active=True,
            ).exists():
                # Add the user for workspace tracking
                _ = ProjectPublicMember.objects.get_or_create(
                    project_id=project_id,
                    project_id=project_deploy_board.project_id,
                    member=request.user,
                )
            issue_activity.delay(
@@ -272,25 +265,25 @@ class IssueReactionPublicViewSet(BaseViewSet):
                ),
                actor_id=str(self.request.user.id),
                issue_id=str(self.kwargs.get("issue_id", None)),
                project_id=str(self.kwargs.get("project_id", None)),
                project_id=str(project_deploy_board.project_id),
                current_instance=None,
                epoch=int(timezone.now().timestamp()),
            )
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def destroy(self, request, slug, project_id, issue_id, reaction_code):
        project_deploy_board = ProjectDeployBoard.objects.get(
            workspace__slug=slug, project_id=project_id
    def destroy(self, request, anchor, issue_id, reaction_code):
        project_deploy_board = DeployBoard.objects.get(
            anchor=anchor, entity_name="project"
        )

        if not project_deploy_board.reactions:
        if not project_deploy_board.is_reactions_enabled:
            return Response(
                {"error": "Reactions are not enabled for this project board"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        issue_reaction = IssueReaction.objects.get(
            workspace__slug=slug,
            workspace_id=project_deploy_board.workspace_id,
            issue_id=issue_id,
            reaction=reaction_code,
            actor=request.user,
@@ -300,7 +293,7 @@ class IssueReactionPublicViewSet(BaseViewSet):
            requested_data=None,
            actor_id=str(self.request.user.id),
            issue_id=str(self.kwargs.get("issue_id", None)),
            project_id=str(self.kwargs.get("project_id", None)),
            project_id=str(project_deploy_board.project_id),
            current_instance=json.dumps(
                {
                    "reaction": str(reaction_code),
@@ -319,30 +312,29 @@ class CommentReactionPublicViewSet(BaseViewSet):

    def get_queryset(self):
        try:
            project_deploy_board = ProjectDeployBoard.objects.get(
                workspace__slug=self.kwargs.get("slug"),
                project_id=self.kwargs.get("project_id"),
            project_deploy_board = DeployBoard.objects.get(
                anchor=self.kwargs.get("anchor"), entity_name="project"
            )
            if project_deploy_board.reactions:
            if project_deploy_board.is_reactions_enabled:
                return (
                    super()
                    .get_queryset()
                    .filter(workspace__slug=self.kwargs.get("slug"))
                    .filter(project_id=self.kwargs.get("project_id"))
                    .filter(workspace_id=project_deploy_board.workspace_id)
                    .filter(project_id=project_deploy_board.project_id)
                    .filter(comment_id=self.kwargs.get("comment_id"))
                    .order_by("-created_at")
                    .distinct()
                )
            return CommentReaction.objects.none()
        except ProjectDeployBoard.DoesNotExist:
        except DeployBoard.DoesNotExist:
            return CommentReaction.objects.none()

    def create(self, request, slug, project_id, comment_id):
        project_deploy_board = ProjectDeployBoard.objects.get(
            workspace__slug=slug, project_id=project_id
    def create(self, request, anchor, comment_id):
        project_deploy_board = DeployBoard.objects.get(
            anchor=anchor, entity_name="project"
        )

        if not project_deploy_board.reactions:
        if not project_deploy_board.is_reactions_enabled:
            return Response(
                {"error": "Reactions are not enabled for this board"},
                status=status.HTTP_400_BAD_REQUEST,
@@ -351,18 +343,18 @@ class CommentReactionPublicViewSet(BaseViewSet):
        serializer = CommentReactionSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save(
                project_id=project_id,
                project_id=project_deploy_board.project_id,
                comment_id=comment_id,
                actor=request.user,
            )
            if not ProjectMember.objects.filter(
                project_id=project_id,
                project_id=project_deploy_board.project_id,
                member=request.user,
                is_active=True,
            ).exists():
                # Add the user for workspace tracking
                _ = ProjectPublicMember.objects.get_or_create(
                    project_id=project_id,
                    project_id=project_deploy_board.project_id,
                    member=request.user,
                )
            issue_activity.delay(
@@ -379,19 +371,19 @@ class CommentReactionPublicViewSet(BaseViewSet):
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def destroy(self, request, slug, project_id, comment_id, reaction_code):
        project_deploy_board = ProjectDeployBoard.objects.get(
            workspace__slug=slug, project_id=project_id
    def destroy(self, request, anchor, comment_id, reaction_code):
        project_deploy_board = DeployBoard.objects.get(
            anchor=anchor, entity_name="project"
        )
        if not project_deploy_board.reactions:
        if not project_deploy_board.is_reactions_enabled:
            return Response(
                {"error": "Reactions are not enabled for this board"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        comment_reaction = CommentReaction.objects.get(
            project_id=project_id,
            workspace__slug=slug,
            project_id=project_deploy_board.project_id,
            workspace_id=project_deploy_board.workspace_id,
            comment_id=comment_id,
            reaction=reaction_code,
            actor=request.user,
@@ -401,7 +393,7 @@ class CommentReactionPublicViewSet(BaseViewSet):
            requested_data=None,
            actor_id=str(self.request.user.id),
            issue_id=None,
            project_id=str(self.kwargs.get("project_id", None)),
            project_id=str(project_deploy_board.project_id),
            current_instance=json.dumps(
                {
                    "reaction": str(reaction_code),
@@ -421,36 +413,42 @@ class IssueVotePublicViewSet(BaseViewSet):

    def get_queryset(self):
        try:
            project_deploy_board = ProjectDeployBoard.objects.get(
                workspace__slug=self.kwargs.get("slug"),
                project_id=self.kwargs.get("project_id"),
            project_deploy_board = DeployBoard.objects.get(
                anchor=self.kwargs.get("anchor"),
                entity_name="project",
            )
            if project_deploy_board.votes:
            if project_deploy_board.is_votes_enabled:
                return (
                    super()
                    .get_queryset()
                    .filter(issue_id=self.kwargs.get("issue_id"))
                    .filter(workspace__slug=self.kwargs.get("slug"))
                    .filter(project_id=self.kwargs.get("project_id"))
                    .filter(workspace_id=project_deploy_board.workspace_id)
                    .filter(project_id=project_deploy_board.project_id)
                )
            return IssueVote.objects.none()
        except ProjectDeployBoard.DoesNotExist:
        except DeployBoard.DoesNotExist:
            return IssueVote.objects.none()

    def create(self, request, slug, project_id, issue_id):
    def create(self, request, anchor, issue_id):
        project_deploy_board = DeployBoard.objects.get(
            anchor=anchor, entity_name="project"
        )
        issue_vote, _ = IssueVote.objects.get_or_create(
            actor_id=request.user.id,
            project_id=project_id,
            project_id=project_deploy_board.project_id,
            issue_id=issue_id,
        )
        # Add the user for workspace tracking
        if not ProjectMember.objects.filter(
            project_id=project_id,
            project_id=project_deploy_board.project_id,
            member=request.user,
            is_active=True,
        ).exists():
            _ = ProjectPublicMember.objects.get_or_create(
                project_id=project_id,
                project_id=project_deploy_board.project_id,
                member=request.user,
            )
        issue_vote.vote = request.data.get("vote", 1)
@@ -462,26 +460,29 @@ class IssueVotePublicViewSet(BaseViewSet):
            ),
            actor_id=str(self.request.user.id),
            issue_id=str(self.kwargs.get("issue_id", None)),
            project_id=str(self.kwargs.get("project_id", None)),
            project_id=str(project_deploy_board.project_id),
            current_instance=None,
            epoch=int(timezone.now().timestamp()),
        )
        serializer = IssueVoteSerializer(issue_vote)
        return Response(serializer.data, status=status.HTTP_201_CREATED)

    def destroy(self, request, slug, project_id, issue_id):
    def destroy(self, request, anchor, issue_id):
        project_deploy_board = DeployBoard.objects.get(
            anchor=anchor, entity_name="project"
        )
        issue_vote = IssueVote.objects.get(
            workspace__slug=slug,
            project_id=project_id,
            issue_id=issue_id,
            actor_id=request.user.id,
            project_id=project_deploy_board.project_id,
            workspace_id=project_deploy_board.workspace_id,
        )
        issue_activity.delay(
            type="issue_vote.activity.deleted",
            requested_data=None,
            actor_id=str(self.request.user.id),
            issue_id=str(self.kwargs.get("issue_id", None)),
            project_id=str(self.kwargs.get("project_id", None)),
            project_id=str(project_deploy_board.project_id),
            current_instance=json.dumps(
                {
                    "vote": str(issue_vote.vote),
@@ -499,9 +500,14 @@ class IssueRetrievePublicEndpoint(BaseAPIView):
        AllowAny,
    ]

    def get(self, request, slug, project_id, issue_id):
    def get(self, request, anchor, issue_id):
        project_deploy_board = DeployBoard.objects.get(
            anchor=anchor, entity_name="project"
        )
        issue = Issue.objects.get(
            workspace__slug=slug, project_id=project_id, pk=issue_id
            workspace_id=project_deploy_board.workspace_id,
            project_id=project_deploy_board.project_id,
            pk=issue_id,
        )
        serializer = IssuePublicSerializer(issue)
        return Response(serializer.data, status=status.HTTP_200_OK)
@@ -512,27 +518,23 @@ class ProjectIssuesPublicEndpoint(BaseAPIView):
        AllowAny,
    ]

    def get(self, request, slug, project_id):
        if not ProjectDeployBoard.objects.filter(
            workspace__slug=slug, project_id=project_id
    def get(self, request, anchor):
        if not DeployBoard.objects.filter(
            anchor=anchor, entity_name="project"
        ).exists():
            return Response(
                {"error": "Project is not published"},
                status=status.HTTP_404_NOT_FOUND,
            )
        project_deploy_board = DeployBoard.objects.get(
            anchor=anchor, entity_name="project"
        )

        project_id = project_deploy_board.entity_identifier
        slug = project_deploy_board.workspace.slug

        filters = issue_filters(request.query_params, "GET")

        # Custom ordering for priority and state
        priority_order = ["urgent", "high", "medium", "low", "none"]
        state_order = [
            "backlog",
            "unstarted",
            "started",
            "completed",
            "cancelled",
        ]

        order_by_param = request.GET.get("order_by", "-created_at")

        issue_queryset = (
@@ -544,8 +546,8 @@ class ProjectIssuesPublicEndpoint(BaseAPIView):
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .filter(project_id=project_id)
            .filter(workspace__slug=slug)
            .filter(project_id=project_deploy_board.project_id)
            .filter(workspace_id=project_deploy_board.workspace_id)
            .select_related("project", "workspace", "state", "parent")
            .prefetch_related("assignees", "labels")
            .prefetch_related(
@@ -562,7 +564,6 @@ class ProjectIssuesPublicEndpoint(BaseAPIView):
            )
            .filter(**filters)
            .annotate(cycle_id=F("issue_cycle__cycle_id"))
            .annotate(module_id=F("issue_module__module_id"))
            .annotate(
                link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                .order_by()
@@ -577,112 +578,118 @@ class ProjectIssuesPublicEndpoint(BaseAPIView):
            .annotate(count=Func(F("id"), function="Count"))
            .values("count")
        )
        .annotate(
            sub_issues_count=Issue.issue_objects.filter(
                parent=OuterRef("id")
            )
            .order_by()
            .annotate(count=Func(F("id"), function="Count"))
            .values("count")
        )
    ).distinct()

        order_by_param = request.GET.get("order_by", "-created_at")

        issue_queryset = self.get_queryset().filter(**filters)

        # Issue queryset
        issue_queryset, order_by_param = order_issue_queryset(
            issue_queryset=issue_queryset,
            order_by_param=order_by_param,
        )

        # Priority Ordering
        if order_by_param == "priority" or order_by_param == "-priority":
            priority_order = (
                priority_order
                if order_by_param == "priority"
                else priority_order[::-1]
            )
            issue_queryset = issue_queryset.annotate(
                priority_order=Case(
                    *[
                        When(priority=p, then=Value(i))
                        for i, p in enumerate(priority_order)
                    ],
                    output_field=CharField(),
                )
            ).order_by("priority_order")
        # Group by
        group_by = request.GET.get("group_by", False)
        sub_group_by = request.GET.get("sub_group_by", False)

        # State Ordering
        elif order_by_param in [
            "state__name",
            "state__group",
            "-state__name",
            "-state__group",
        ]:
            state_order = (
                state_order
                if order_by_param in ["state__name", "state__group"]
                else state_order[::-1]
            )
            issue_queryset = issue_queryset.annotate(
                state_order=Case(
                    *[
                        When(state__group=state_group, then=Value(i))
                        for i, state_group in enumerate(state_order)
                    ],
                    default=Value(len(state_order)),
                    output_field=CharField(),
        # issue queryset
        issue_queryset = issue_queryset_grouper(
            queryset=issue_queryset,
            group_by=group_by,
            sub_group_by=sub_group_by,
        )

        if group_by:
            # Check group and sub group value paginate
            if sub_group_by:
                if group_by == sub_group_by:
                    return Response(
                        {
                            "error": "Group by and sub group by cannot have same parameters"
                        },
                        status=status.HTTP_400_BAD_REQUEST,
                    )
                else:
                    # group and sub group pagination
                    return self.paginate(
                        request=request,
                        order_by=order_by_param,
                        queryset=issue_queryset,
                        on_results=lambda issues: issue_on_results(
                            group_by=group_by,
                            issues=issues,
                            sub_group_by=sub_group_by,
                        ),
                        paginator_cls=SubGroupedOffsetPaginator,
                        group_by_fields=issue_group_values(
                            field=group_by,
                            slug=slug,
                            project_id=project_id,
                            filters=filters,
                        ),
                        sub_group_by_fields=issue_group_values(
                            field=sub_group_by,
                            slug=slug,
                            project_id=project_id,
                            filters=filters,
                        ),
                        group_by_field_name=group_by,
                        sub_group_by_field_name=sub_group_by,
                        count_filter=Q(
                            Q(issue_inbox__status=1)
                            | Q(issue_inbox__status=-1)
                            | Q(issue_inbox__status=2)
                            | Q(issue_inbox__isnull=True),
                            archived_at__isnull=True,
                            is_draft=False,
                        ),
                    )
            # Group Paginate
            else:
                # Group paginate
                return self.paginate(
                    request=request,
                    order_by=order_by_param,
                    queryset=issue_queryset,
                    on_results=lambda issues: issue_on_results(
                        group_by=group_by,
                        issues=issues,
                        sub_group_by=sub_group_by,
                    ),
                    paginator_cls=GroupedOffsetPaginator,
                    group_by_fields=issue_group_values(
                        field=group_by,
                        slug=slug,
                        project_id=project_id,
                        filters=filters,
                    ),
                    group_by_field_name=group_by,
                    count_filter=Q(
                        Q(issue_inbox__status=1)
                        | Q(issue_inbox__status=-1)
                        | Q(issue_inbox__status=2)
                        | Q(issue_inbox__isnull=True),
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                )
            ).order_by("state_order")
        # assignee and label ordering
        elif order_by_param in [
            "labels__name",
            "-labels__name",
            "assignees__first_name",
            "-assignees__first_name",
        ]:
            issue_queryset = issue_queryset.annotate(
                max_values=Max(
                    order_by_param[1::]
                    if order_by_param.startswith("-")
                    else order_by_param
                )
            ).order_by(
                "-max_values"
                if order_by_param.startswith("-")
                else "max_values"
            )
        else:
            issue_queryset = issue_queryset.order_by(order_by_param)

        issues = IssuePublicSerializer(issue_queryset, many=True).data

        state_group_order = [
            "backlog",
            "unstarted",
            "started",
            "completed",
            "cancelled",
        ]

        states = (
            State.objects.filter(
                ~Q(name="Triage"),
                workspace__slug=slug,
                project_id=project_id,
            )
            .annotate(
                custom_order=Case(
                    *[
                        When(group=value, then=Value(index))
                        for index, value in enumerate(state_group_order)
                    ],
                    default=Value(len(state_group_order)),
                    output_field=IntegerField(),
        # List Paginate
        return self.paginate(
            order_by=order_by_param,
            request=request,
            queryset=issue_queryset,
            on_results=lambda issues: issue_on_results(
                group_by=group_by, issues=issues, sub_group_by=sub_group_by
            ),
        )
            .values("name", "group", "color", "id")
            .order_by("custom_order", "sequence")
        )

        labels = Label.objects.filter(
            workspace__slug=slug, project_id=project_id
        ).values("id", "name", "color", "parent")

        ## Grouping the results
        group_by = request.GET.get("group_by", False)
        if group_by:
            issues = group_results(issues, group_by)

        return Response(
            {
                "issues": issues,
                "states": states,
                "labels": labels,
            },
            status=status.HTTP_200_OK,
        )

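The rewritten endpoint above hands ordering, grouping, and pagination to shared helpers instead of hand-rolled Case/When blocks. A sketch of the call order, using only the helper names visible in this diff (arguments simplified; group_by value invented):

filters = issue_filters(request.query_params, "GET")
issue_queryset = self.get_queryset().filter(**filters)
issue_queryset, order_by_param = order_issue_queryset(
    issue_queryset=issue_queryset, order_by_param="-created_at"
)
issue_queryset = issue_queryset_grouper(
    queryset=issue_queryset, group_by="state_id", sub_group_by=False
)
# self.paginate(...) then selects GroupedOffsetPaginator or
# SubGroupedOffsetPaginator depending on group_by / sub_group_by.
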
@@ -11,10 +11,10 @@ from rest_framework.permissions import AllowAny

# Module imports
from .base import BaseAPIView
from plane.app.serializers import ProjectDeployBoardSerializer
from plane.app.serializers import DeployBoardSerializer
from plane.db.models import (
    Project,
    ProjectDeployBoard,
    DeployBoard,
)


@@ -23,11 +23,11 @@ class ProjectDeployBoardPublicSettingsEndpoint(BaseAPIView):
        AllowAny,
    ]

    def get(self, request, slug, project_id):
        project_deploy_board = ProjectDeployBoard.objects.get(
            workspace__slug=slug, project_id=project_id
    def get(self, request, anchor):
        project_deploy_board = DeployBoard.objects.get(
            anchor=anchor, entity_name="project"
        )
        serializer = ProjectDeployBoardSerializer(project_deploy_board)
        serializer = DeployBoardSerializer(project_deploy_board)
        return Response(serializer.data, status=status.HTTP_200_OK)

@@ -36,13 +36,16 @@ class WorkspaceProjectDeployBoardEndpoint(BaseAPIView):
        AllowAny,
    ]

    def get(self, request, slug):
    def get(self, request, anchor):
        deploy_board = DeployBoard.objects.get(
            anchor=anchor, entity_name="project"
        )
        projects = (
            Project.objects.filter(workspace__slug=slug)
            Project.objects.filter(workspace=deploy_board.workspace)
            .annotate(
                is_public=Exists(
                    ProjectDeployBoard.objects.filter(
                        workspace__slug=slug, project_id=OuterRef("pk")
                    DeployBoard.objects.filter(
                        anchor=anchor,
                        project_id=OuterRef("pk"),
                        entity_name="project",
                    )
                )
            )
@@ -58,3 +61,16 @@ class WorkspaceProjectDeployBoardEndpoint(BaseAPIView):
        )

        return Response(projects, status=status.HTTP_200_OK)


class WorkspaceProjectAnchorEndpoint(BaseAPIView):
    permission_classes = [
        AllowAny,
    ]

    def get(self, request, slug, project_id):
        project_deploy_board = DeployBoard.objects.get(
            workspace__slug=slug, project_id=project_id
        )
        serializer = DeployBoardSerializer(project_deploy_board)
        return Response(serializer.data, status=status.HTTP_200_OK)

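WorkspaceProjectAnchorEndpoint bridges the two addressing schemes: the app resolves a slug/project pair to its anchor once, and all later public traffic uses the anchor routes. A hypothetical exchange (IDs and anchor invented for illustration):

GET /workspaces/acme/projects/9b1c.../anchor/
-> {"anchor": "4f2a1c9d8b7e", ...}   # DeployBoardSerializer payload
GET /anchor/4f2a1c9d8b7e/issues/
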
@@ -4,18 +4,28 @@ from itertools import groupby

# Django import
from django.db import models
from django.db.models import Case, CharField, Count, F, Sum, Value, When
from django.db.models import (
    Case,
    CharField,
    Count,
    F,
    Sum,
    Value,
    When,
    IntegerField,
)
from django.db.models.functions import (
    Coalesce,
    Concat,
    ExtractMonth,
    ExtractYear,
    TruncDate,
    Cast,
)
from django.utils import timezone

# Module imports
from plane.db.models import Issue
from plane.db.models import Issue, Project


def annotate_with_monthly_dimension(queryset, field_name, attribute):
@@ -87,9 +97,9 @@ def build_graph_plot(queryset, x_axis, y_axis, segment=None):

    # Estimate
    else:
        queryset = queryset.annotate(estimate=Sum("estimate_point")).order_by(
            x_axis
        )
        queryset = queryset.annotate(
            estimate=Sum(Cast("estimate_point__value", IntegerField()))
        ).order_by(x_axis)
    queryset = (
        queryset.annotate(segment=F(segment)) if segment else queryset
    )
@@ -110,9 +120,33 @@ def build_graph_plot(queryset, x_axis, y_axis, segment=None):
    return sort_data(grouped_data, temp_axis)


def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None):
def burndown_plot(
    queryset,
    slug,
    project_id,
    plot_type,
    cycle_id=None,
    module_id=None,
):
    # Total Issues in Cycle or Module
    total_issues = queryset.total_issues
    # check whether the estimate is a point or not
    estimate_type = Project.objects.filter(
        workspace__slug=slug,
        pk=project_id,
        estimate__isnull=False,
        estimate__type="points",
    ).exists()
    if estimate_type and plot_type == "points":
        issue_estimates = Issue.objects.filter(
            workspace__slug=slug,
            project_id=project_id,
            issue_cycle__cycle_id=cycle_id,
            estimate_point__isnull=False,
        ).values_list("estimate_point__value", flat=True)

        issue_estimates = [int(value) for value in issue_estimates]
        total_estimate_points = sum(issue_estimates)

    if cycle_id:
        if queryset.end_date and queryset.start_date:
@@ -128,18 +162,32 @@ def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None):

        chart_data = {str(date): 0 for date in date_range}

        completed_issues_distribution = (
            Issue.issue_objects.filter(
                workspace__slug=slug,
                project_id=project_id,
                issue_cycle__cycle_id=cycle_id,
        if plot_type == "points":
            completed_issues_estimate_point_distribution = (
                Issue.issue_objects.filter(
                    workspace__slug=slug,
                    project_id=project_id,
                    issue_cycle__cycle_id=cycle_id,
                    estimate_point__isnull=False,
                )
                .annotate(date=TruncDate("completed_at"))
                .values("date")
                .values("date", "estimate_point__value")
                .order_by("date")
            )
        else:
            completed_issues_distribution = (
                Issue.issue_objects.filter(
                    workspace__slug=slug,
                    project_id=project_id,
                    issue_cycle__cycle_id=cycle_id,
                )
                .annotate(date=TruncDate("completed_at"))
                .values("date")
                .annotate(total_completed=Count("id"))
                .values("date", "total_completed")
                .order_by("date")
            )
            .annotate(date=TruncDate("completed_at"))
            .values("date")
            .annotate(total_completed=Count("id"))
            .values("date", "total_completed")
            .order_by("date")
        )

    if module_id:
        # Get all dates between the two dates
@@ -152,31 +200,59 @@ def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None):

        chart_data = {str(date): 0 for date in date_range}

        completed_issues_distribution = (
            Issue.issue_objects.filter(
                workspace__slug=slug,
                project_id=project_id,
                issue_module__module_id=module_id,
        if plot_type == "points":
            completed_issues_estimate_point_distribution = (
                Issue.issue_objects.filter(
                    workspace__slug=slug,
                    project_id=project_id,
                    issue_module__module_id=module_id,
                    estimate_point__isnull=False,
                )
                .annotate(date=TruncDate("completed_at"))
                .values("date")
                .values("date", "estimate_point__value")
                .order_by("date")
            )
        else:
            completed_issues_distribution = (
                Issue.issue_objects.filter(
                    workspace__slug=slug,
                    project_id=project_id,
                    issue_module__module_id=module_id,
                )
                .annotate(date=TruncDate("completed_at"))
                .values("date")
                .annotate(total_completed=Count("id"))
                .values("date", "total_completed")
                .order_by("date")
            )
            .annotate(date=TruncDate("completed_at"))
            .values("date")
            .annotate(total_completed=Count("id"))
            .values("date", "total_completed")
            .order_by("date")
        )

    for date in date_range:
        cumulative_pending_issues = total_issues
        total_completed = 0
        total_completed = sum(
            item["total_completed"]
            for item in completed_issues_distribution
            if item["date"] is not None and item["date"] <= date
        )
        cumulative_pending_issues -= total_completed
        if date > timezone.now().date():
            chart_data[str(date)] = None
        if plot_type == "points":
            cumulative_pending_issues = total_estimate_points
            total_completed = 0
            total_completed = sum(
                int(item["estimate_point__value"])
                for item in completed_issues_estimate_point_distribution
                if item["date"] is not None and item["date"] <= date
            )
            cumulative_pending_issues -= total_completed
            if date > timezone.now().date():
                chart_data[str(date)] = None
            else:
                chart_data[str(date)] = cumulative_pending_issues
        else:
            chart_data[str(date)] = cumulative_pending_issues
            cumulative_pending_issues = total_issues
            total_completed = 0
            total_completed = sum(
                item["total_completed"]
                for item in completed_issues_distribution
                if item["date"] is not None and item["date"] <= date
            )
            cumulative_pending_issues -= total_completed
            if date > timezone.now().date():
                chart_data[str(date)] = None
            else:
                chart_data[str(date)] = cumulative_pending_issues

    return chart_data

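The points branch above burns down against summed estimate values instead of raw issue counts. A tiny worked example under the same rules (dates and values invented):

from datetime import date

# total_estimate_points = 10, completions:
completed = [
    {"date": date(2024, 5, 1), "estimate_point__value": "3"},
    {"date": date(2024, 5, 2), "estimate_point__value": "2"},
]
# On 2024-05-02 the chart holds 10 - (3 + 2) = 5 pending points;
# dates after today are charted as None, exactly as in the loop above.
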
@@ -66,7 +66,7 @@ def invalidate_cache_directly(
    custom_path = path if path is not None else request.get_full_path()
    auth_header = (
        None
        if request.user.is_anonymous
        if request and request.user.is_anonymous
        else str(request.user.id) if user else None
    )
    key = generate_cache_key(custom_path, auth_header)

@@ -1,5 +1,9 @@
# Python imports
import logging
import traceback

# Django imports
from django.conf import settings

# Third party imports
from sentry_sdk import capture_exception
@@ -11,6 +15,10 @@ def log_exception(e):
    logger = logging.getLogger("plane")
    logger.error(e)

    # Log traceback if running in Debug
    if settings.DEBUG:
        logger.error(traceback.format_exc())

    # Capture in sentry if configured
    capture_exception(e)
    return

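A quick usage sketch of the updated helper (the module path and the called function are assumptions, not shown in this diff):

from plane.utils.exception_logger import log_exception  # path assumed

try:
    risky_operation()  # placeholder for any failing call
except Exception as e:
    log_exception(e)  # logs the error, adds the traceback under DEBUG, reports to Sentry
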
@@ -1,240 +1,191 @@
def resolve_keys(group_keys, value):
    """resolve keys to a key which will be used for
    grouping
# Django imports
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models import Q, UUIDField, Value
from django.db.models.functions import Coalesce

    Args:
        group_keys (string): key which will be used for grouping
        value (obj): data value

    Returns:
        string: the key which will be used for
    """
    keys = group_keys.split(".")
    for key in keys:
        value = value.get(key, None)
    return value
# Module imports
from plane.db.models import (
    Cycle,
    Issue,
    Label,
    Module,
    Project,
    ProjectMember,
    State,
    WorkspaceMember,
)


def group_results(results_data, group_by, sub_group_by=False):
    """group results data into certain group_by
def issue_queryset_grouper(queryset, group_by, sub_group_by):

    Args:
        results_data (obj): complete results data
        group_by (key): string
    FIELD_MAPPER = {
        "label_ids": "labels__id",
        "assignee_ids": "assignees__id",
        "module_ids": "issue_module__module_id",
    }

    Returns:
        obj: grouped results
    """
    if sub_group_by:
        main_responsive_dict = dict()
    annotations_map = {
        "assignee_ids": ("assignees__id", ~Q(assignees__id__isnull=True)),
        "label_ids": ("labels__id", ~Q(labels__id__isnull=True)),
        "module_ids": (
            "issue_module__module_id",
            ~Q(issue_module__module_id__isnull=True),
        ),
    }
    default_annotations = {
        key: Coalesce(
            ArrayAgg(
                field,
                distinct=True,
                filter=condition,
            ),
            Value([], output_field=ArrayField(UUIDField())),
        )
        for key, (field, condition) in annotations_map.items()
        if FIELD_MAPPER.get(key) != group_by
        and FIELD_MAPPER.get(key) != sub_group_by
    }

        if sub_group_by == "priority":
            main_responsive_dict = {
                "urgent": {},
                "high": {},
                "medium": {},
                "low": {},
                "none": {},
            }
    return queryset.annotate(**default_annotations)

        for value in results_data:
            main_group_attribute = resolve_keys(sub_group_by, value)
            group_attribute = resolve_keys(group_by, value)
            if isinstance(main_group_attribute, list) and not isinstance(
                group_attribute, list
            ):
                if len(main_group_attribute):
                    for attrib in main_group_attribute:
                        if str(attrib) not in main_responsive_dict:
                            main_responsive_dict[str(attrib)] = {}
                        if (
                            str(group_attribute)
                            in main_responsive_dict[str(attrib)]
                        ):
                            main_responsive_dict[str(attrib)][
                                str(group_attribute)
                            ].append(value)
                        else:
                            main_responsive_dict[str(attrib)][
                                str(group_attribute)
                            ] = []
                            main_responsive_dict[str(attrib)][
                                str(group_attribute)
                            ].append(value)
                else:
                    if str(None) not in main_responsive_dict:
                        main_responsive_dict[str(None)] = {}

                    if str(group_attribute) in main_responsive_dict[str(None)]:
                        main_responsive_dict[str(None)][
                            str(group_attribute)
                        ].append(value)
                    else:
                        main_responsive_dict[str(None)][
                            str(group_attribute)
                        ] = []
                        main_responsive_dict[str(None)][
                            str(group_attribute)
                        ].append(value)
def issue_on_results(issues, group_by, sub_group_by):

            elif isinstance(group_attribute, list) and not isinstance(
                main_group_attribute, list
            ):
                if str(main_group_attribute) not in main_responsive_dict:
                    main_responsive_dict[str(main_group_attribute)] = {}
                if len(group_attribute):
                    for attrib in group_attribute:
                        if (
                            str(attrib)
                            in main_responsive_dict[str(main_group_attribute)]
                        ):
                            main_responsive_dict[str(main_group_attribute)][
                                str(attrib)
                            ].append(value)
                        else:
                            main_responsive_dict[str(main_group_attribute)][
                                str(attrib)
                            ] = []
                            main_responsive_dict[str(main_group_attribute)][
                                str(attrib)
                            ].append(value)
                else:
                    if (
                        str(None)
                        in main_responsive_dict[str(main_group_attribute)]
                    ):
                        main_responsive_dict[str(main_group_attribute)][
                            str(None)
                        ].append(value)
                    else:
                        main_responsive_dict[str(main_group_attribute)][
                            str(None)
                        ] = []
                        main_responsive_dict[str(main_group_attribute)][
                            str(None)
                        ].append(value)
    FIELD_MAPPER = {
        "labels__id": "label_ids",
        "assignees__id": "assignee_ids",
        "issue_module__module_id": "module_ids",
    }

            elif isinstance(group_attribute, list) and isinstance(
                main_group_attribute, list
            ):
                if len(main_group_attribute):
                    for main_attrib in main_group_attribute:
                        if str(main_attrib) not in main_responsive_dict:
                            main_responsive_dict[str(main_attrib)] = {}
                        if len(group_attribute):
                            for attrib in group_attribute:
                                if (
                                    str(attrib)
                                    in main_responsive_dict[str(main_attrib)]
                                ):
                                    main_responsive_dict[str(main_attrib)][
                                        str(attrib)
                                    ].append(value)
                                else:
                                    main_responsive_dict[str(main_attrib)][
                                        str(attrib)
                                    ] = []
                                    main_responsive_dict[str(main_attrib)][
                                        str(attrib)
                                    ].append(value)
                        else:
                            if (
                                str(None)
                                in main_responsive_dict[str(main_attrib)]
                            ):
                                main_responsive_dict[str(main_attrib)][
                                    str(None)
                                ].append(value)
                            else:
                                main_responsive_dict[str(main_attrib)][
                                    str(None)
                                ] = []
                                main_responsive_dict[str(main_attrib)][
                                    str(None)
                                ].append(value)
            else:
                if str(None) not in main_responsive_dict:
                    main_responsive_dict[str(None)] = {}
                if len(group_attribute):
                    for attrib in group_attribute:
                        if str(attrib) in main_responsive_dict[str(None)]:
                            main_responsive_dict[str(None)][
                                str(attrib)
                            ].append(value)
                        else:
                            main_responsive_dict[str(None)][
                                str(attrib)
                            ] = []
                            main_responsive_dict[str(None)][
                                str(attrib)
                            ].append(value)
                else:
                    if str(None) in main_responsive_dict[str(None)]:
                        main_responsive_dict[str(None)][str(None)].append(
                            value
                        )
                    else:
                        main_responsive_dict[str(None)][str(None)] = []
                        main_responsive_dict[str(None)][str(None)].append(
                            value
                        )
        else:
            main_group_attribute = resolve_keys(sub_group_by, value)
            group_attribute = resolve_keys(group_by, value)
    original_list = ["assignee_ids", "label_ids", "module_ids"]

            if str(main_group_attribute) not in main_responsive_dict:
                main_responsive_dict[str(main_group_attribute)] = {}
    required_fields = [
        "id",
        "name",
        "state_id",
        "sort_order",
        "completed_at",
        "estimate_point",
        "priority",
        "start_date",
        "target_date",
        "sequence_id",
        "project_id",
        "parent_id",
        "cycle_id",
        "sub_issues_count",
        "created_at",
        "updated_at",
        "created_by",
        "updated_by",
        "attachment_count",
        "link_count",
        "is_draft",
        "archived_at",
        "state__group",
    ]

            if (
                str(group_attribute)
                in main_responsive_dict[str(main_group_attribute)]
            ):
                main_responsive_dict[str(main_group_attribute)][
                    str(group_attribute)
                ].append(value)
            else:
                main_responsive_dict[str(main_group_attribute)][
                    str(group_attribute)
                ] = []
                main_responsive_dict[str(main_group_attribute)][
                    str(group_attribute)
                ].append(value)
    if group_by in FIELD_MAPPER:
        original_list.remove(FIELD_MAPPER[group_by])
        original_list.append(group_by)

        return main_responsive_dict
    if sub_group_by in FIELD_MAPPER:
        original_list.remove(FIELD_MAPPER[sub_group_by])
        original_list.append(sub_group_by)

    else:
        response_dict = {}
    required_fields.extend(original_list)
    return issues.values(*required_fields)
||||
if group_by == "priority":
|
||||
response_dict = {
|
||||
"urgent": [],
|
||||
"high": [],
|
||||
"medium": [],
|
||||
"low": [],
|
||||
"none": [],
|
||||
}
|
||||
|
||||
for value in results_data:
|
||||
group_attribute = resolve_keys(group_by, value)
|
||||
if isinstance(group_attribute, list):
|
||||
if len(group_attribute):
|
||||
for attrib in group_attribute:
|
||||
if str(attrib) in response_dict:
|
||||
response_dict[str(attrib)].append(value)
|
||||
else:
|
||||
response_dict[str(attrib)] = []
|
||||
response_dict[str(attrib)].append(value)
|
||||
else:
|
||||
if str(None) in response_dict:
|
||||
response_dict[str(None)].append(value)
|
||||
else:
|
||||
response_dict[str(None)] = []
|
||||
response_dict[str(None)].append(value)
|
||||
else:
|
||||
if str(group_attribute) in response_dict:
|
||||
response_dict[str(group_attribute)].append(value)
|
||||
else:
|
||||
response_dict[str(group_attribute)] = []
|
||||
response_dict[str(group_attribute)].append(value)
|
||||
|
||||
return response_dict
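To illustrate the single-level bucketing above, here is a minimal pure-Python sketch. The sample rows are invented, and resolve_keys is reduced to a plain dict lookup; list-valued attributes (labels, assignees, modules) fan the row out into one bucket per value, with an empty list falling into a "None" bucket:

# Minimal sketch of the grouping above, using plain dicts in place of
# serialized issue rows. resolve_keys here is a stand-in that just reads
# the attribute from the row.
def resolve_keys(key, value):
    return value.get(key)

results_data = [
    {"id": 1, "priority": "high", "label_ids": ["a", "b"]},
    {"id": 2, "priority": "low", "label_ids": []},
]

response_dict = {}
for value in results_data:
    group_attribute = resolve_keys("label_ids", value)
    if isinstance(group_attribute, list):
        # fan out to one bucket per list entry; empty lists go to "None"
        for attrib in group_attribute or [None]:
            response_dict.setdefault(str(attrib), []).append(value)
    else:
        response_dict.setdefault(str(group_attribute), []).append(value)

print(sorted(response_dict))  # ['None', 'a', 'b']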
def issue_group_values(field, slug, project_id=None, filters={}):
    if field == "state_id":
        queryset = State.objects.filter(
            ~Q(name="Triage"),
            workspace__slug=slug,
        ).values_list("id", flat=True)
        if project_id:
            return list(queryset.filter(project_id=project_id))
        else:
            return list(queryset)
    if field == "labels__id":
        queryset = Label.objects.filter(workspace__slug=slug).values_list(
            "id", flat=True
        )
        if project_id:
            return list(queryset.filter(project_id=project_id)) + ["None"]
        else:
            return list(queryset) + ["None"]
    if field == "assignees__id":
        if project_id:
            return ProjectMember.objects.filter(
                workspace__slug=slug,
                project_id=project_id,
                is_active=True,
            ).values_list("member_id", flat=True)
        else:
            return list(
                WorkspaceMember.objects.filter(
                    workspace__slug=slug, is_active=True
                ).values_list("member_id", flat=True)
            )
    if field == "issue_module__module_id":
        queryset = Module.objects.filter(
            workspace__slug=slug,
        ).values_list("id", flat=True)
        if project_id:
            return list(queryset.filter(project_id=project_id)) + ["None"]
        else:
            return list(queryset) + ["None"]
    if field == "cycle_id":
        queryset = Cycle.objects.filter(
            workspace__slug=slug,
        ).values_list("id", flat=True)
        if project_id:
            return list(queryset.filter(project_id=project_id)) + ["None"]
        else:
            return list(queryset) + ["None"]
    if field == "project_id":
        queryset = Project.objects.filter(workspace__slug=slug).values_list(
            "id", flat=True
        )
        return list(queryset)
    if field == "priority":
        return [
            "low",
            "medium",
            "high",
            "urgent",
            "none",
        ]
    if field == "state__group":
        return [
            "backlog",
            "unstarted",
            "started",
            "completed",
            "cancelled",
        ]
    if field == "target_date":
        queryset = (
            Issue.issue_objects.filter(workspace__slug=slug)
            .filter(**filters)
            .values_list("target_date", flat=True)
            .distinct()
        )
        if project_id:
            return list(queryset.filter(project_id=project_id))
        else:
            return list(queryset)
    if field == "start_date":
        queryset = (
            Issue.issue_objects.filter(workspace__slug=slug)
            .filter(**filters)
            .values_list("start_date", flat=True)
            .distinct()
        )
        if project_id:
            return list(queryset.filter(project_id=project_id))
        else:
            return list(queryset)
    return []
@@ -1,6 +1,7 @@
import re
import uuid
from datetime import timedelta

from django.utils import timezone

# The date from pattern
@@ -63,24 +64,27 @@ def date_filter(filter, date_term, queries):
    """
    for query in queries:
        date_query = query.split(";")
        if date_query:
            if len(date_query) >= 2:
                match = pattern.match(date_query[0])
                if match:
                    if len(date_query) == 3:
                        digit, term = date_query[0].split("_")
                        string_date_filter(
                            filter=filter,
                            duration=int(digit),
                            subsequent=date_query[1],
                            term=term,
                            date_filter=date_term,
                            offset=date_query[2],
                        )
                    else:
                        if "after" in date_query:
                            filter[f"{date_term}__gte"] = date_query[0]
                        else:
                            filter[f"{date_term}__lte"] = date_query[0]
            else:
                filter[f"{date_term}__contains"] = date_query[0]
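The date strings this accepts come in semicolon-separated shapes. A minimal sketch of the absolute-date branches (invented dates; the relative `1_weeks;after;fromnow` shape is delegated to string_date_filter and omitted here):

# Sketch of the query shapes date_filter handles, split on ";".
# "2024-05-01;after"  -> <date_term>__gte = "2024-05-01"
# "2024-06-01;before" -> <date_term>__lte = "2024-06-01"
# "2024-07-01"        -> <date_term>__contains = "2024-07-01"
filter = {}  # mirrors the `filter` dict argument of date_filter
date_term = "target_date"
for query in ["2024-05-01;after", "2024-06-01;before", "2024-07-01"]:
    date_query = query.split(";")
    if len(date_query) >= 2:
        if "after" in date_query:
            filter[f"{date_term}__gte"] = date_query[0]
        else:
            filter[f"{date_term}__lte"] = date_query[0]
    else:
        filter[f"{date_term}__contains"] = date_query[0]

print(filter)
# {'target_date__gte': '2024-05-01', 'target_date__lte': '2024-06-01',
#  'target_date__contains': '2024-07-01'}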

def filter_state(params, filter, method, prefix=""):


apiserver/plane/utils/order_queryset.py (new file, 84 lines)
@@ -0,0 +1,84 @@
from django.db.models import (
    Case,
    CharField,
    Min,
    Value,
    When,
)

# Custom ordering for priority and state
PRIORITY_ORDER = ["urgent", "high", "medium", "low", "none"]
STATE_ORDER = [
    "backlog",
    "unstarted",
    "started",
    "completed",
    "cancelled",
]


def order_issue_queryset(issue_queryset, order_by_param="-created_at"):
    # Priority Ordering
    if order_by_param == "priority" or order_by_param == "-priority":
        issue_queryset = issue_queryset.annotate(
            priority_order=Case(
                *[
                    When(priority=p, then=Value(i))
                    for i, p in enumerate(PRIORITY_ORDER)
                ],
                output_field=CharField(),
            )
        ).order_by("priority_order")
        order_by_param = (
            "-priority_order"
            if order_by_param.startswith("-")
            else "priority_order"
        )
    # State Ordering
    elif order_by_param in [
        "state__group",
        "-state__group",
    ]:
        state_order = (
            STATE_ORDER
            if order_by_param in ["state__name", "state__group"]
            else STATE_ORDER[::-1]
        )
        issue_queryset = issue_queryset.annotate(
            state_order=Case(
                *[
                    When(state__group=state_group, then=Value(i))
                    for i, state_group in enumerate(state_order)
                ],
                default=Value(len(state_order)),
                output_field=CharField(),
            )
        ).order_by("state_order")
        order_by_param = (
            "-state_order" if order_by_param.startswith("-") else "state_order"
        )
    # assignee and label ordering
    elif order_by_param in [
        "labels__name",
        "assignees__first_name",
        "issue_module__module__name",
        "-labels__name",
        "-assignees__first_name",
        "-issue_module__module__name",
    ]:
        issue_queryset = issue_queryset.annotate(
            min_values=Min(
                order_by_param[1::]
                if order_by_param.startswith("-")
                else order_by_param
            )
        ).order_by(
            "-min_values" if order_by_param.startswith("-") else "min_values"
        )
        order_by_param = (
            "-min_values" if order_by_param.startswith("-") else "min_values"
        )
    else:
        issue_queryset = issue_queryset.order_by(order_by_param)
    return issue_queryset, order_by_param
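The annotate(Case(When(...))) pattern above is equivalent to ranking each row by its index in the custom order. A plain-Python equivalent with invented rows:

# Pure-Python sketch of the priority ordering: each priority maps to its
# index in PRIORITY_ORDER and rows are sorted by that rank.
PRIORITY_ORDER = ["urgent", "high", "medium", "low", "none"]
rank = {p: i for i, p in enumerate(PRIORITY_ORDER)}

issues = [{"id": 1, "priority": "low"}, {"id": 2, "priority": "urgent"}]
ordered = sorted(issues, key=lambda issue: rank[issue["priority"]])
print([issue["id"] for issue in ordered])  # [2, 1]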
@@ -1,33 +1,49 @@
# Python imports
import math
from collections import defaultdict
from collections.abc import Sequence

# Django imports
from django.db.models import Count, F, Window
from django.db.models.functions import RowNumber

# Third party imports
from rest_framework.exceptions import ParseError
from rest_framework.response import Response

# Module imports


class Cursor:
    # The cursor value
    def __init__(self, value, offset=0, is_prev=False, has_results=None):
        self.value = value
        self.offset = int(offset)
        self.is_prev = bool(is_prev)
        self.has_results = has_results

    # Return the cursor value in string format
    def __str__(self):
        return f"{self.value}:{self.offset}:{int(self.is_prev)}"

    # Compare all cursor attributes for equality
    def __eq__(self, other):
        return all(
            getattr(self, attr) == getattr(other, attr)
            for attr in ("value", "offset", "is_prev", "has_results")
        )

    # Return the representation of the cursor
    def __repr__(self):
        return f"{type(self).__name__}: value={self.value} offset={self.offset}, is_prev={int(self.is_prev)}"

    # Return whether the cursor has results
    def __bool__(self):
        return bool(self.has_results)

    @classmethod
    def from_string(cls, value):
        """Return the cursor value from string format"""
        try:
            bits = value.split(":")
            if len(bits) != 3:
@@ -50,15 +66,19 @@ class CursorResult(Sequence):
        self.max_hits = max_hits

    def __len__(self):
        # Return the length of the results
        return len(self.results)

    def __iter__(self):
        # Return the iterator of the results
        return iter(self.results)

    def __getitem__(self, key):
        # Return the results based on the key
        return self.results[key]

    def __repr__(self):
        # Return the representation of the results
        return f"<{type(self).__name__}: results={len(self.results)}>"
@@ -85,11 +105,14 @@ class OffsetPaginator:
        max_offset=None,
        on_results=None,
    ):
        # Key tuple; strip the leading `-` if the order is descending
        self.key = (
            order_by
            if order_by is None or isinstance(order_by, (list, tuple, set))
            else (order_by[1::] if order_by.startswith("-") else order_by,)
        )
        # Set desc to true when `-` exists in the order by
        self.desc = bool(order_by) and order_by.startswith("-")
        self.queryset = queryset
        self.max_limit = max_limit
        self.max_offset = max_offset
@@ -101,11 +124,101 @@ class OffsetPaginator:
        if cursor is None:
            cursor = Cursor(0, 0, 0)

        # Get the min from limit and max limit
        limit = min(limit, self.max_limit)

        # queryset
        queryset = self.queryset
        if self.key:
            queryset = queryset.order_by(
                (
                    F(*self.key).desc(nulls_last=True)
                    if self.desc
                    else F(*self.key).asc(nulls_last=True)
                ),
                "-created_at",
            )
        # The current page
        page = cursor.offset
        # The offset
        offset = cursor.offset * cursor.value
        stop = offset + (cursor.value or limit) + 1

        if self.max_offset is not None and offset >= self.max_offset:
            raise BadPaginationError("Pagination offset too large")
        if offset < 0:
            raise BadPaginationError("Pagination offset cannot be negative")

        results = queryset[offset:stop]

        if cursor.value != limit:
            results = results[-(limit + 1):]

        # Adjust cursors based on the results for pagination
        next_cursor = Cursor(limit, page + 1, False, results.count() > limit)
        # If the page is greater than 0, then set the previous cursor
        prev_cursor = Cursor(limit, page - 1, True, page > 0)

        # Trim the lookahead row before processing
        results = results[:limit]

        # Process the results
        if self.on_results:
            results = self.on_results(results)

        # Count the queryset
        count = queryset.count()

        # Calculate the total number of pages
        max_hits = math.ceil(count / limit)

        # Return the cursor results
        return CursorResult(
            results=results,
            next=next_cursor,
            prev=prev_cursor,
            hits=count,
            max_hits=max_hits,
        )

    def process_results(self, results):
        raise NotImplementedError
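The arithmetic above slices one row past the limit so "is there a next page?" falls out of the row count without a second query. A pure-Python sketch with an invented row list:

limit = 10
rows = list(range(25))          # stand-in for the ordered queryset

page = 2                        # cursor.offset
offset = page * limit           # 20
stop = offset + limit + 1       # 31 - one extra row as a lookahead

window = rows[offset:stop]      # only 5 rows remain, so no lookahead row
has_next = len(window) > limit
print(window[:limit], has_next)  # [20, 21, 22, 23, 24] False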
class GroupedOffsetPaginator(OffsetPaginator):

    # Field mappers
    FIELD_MAPPER = {
        "labels__id": "label_ids",
        "assignees__id": "assignee_ids",
        "modules__id": "module_ids",
    }

    def __init__(
        self,
        queryset,
        group_by_field_name,
        group_by_fields,
        count_filter,
        *args,
        **kwargs,
    ):
        # Initiate the parent class for all the parameters
        super().__init__(queryset, *args, **kwargs)
        self.group_by_field_name = group_by_field_name
        self.group_by_fields = group_by_fields
        self.count_filter = count_filter

    def get_result(self, limit=50, cursor=None):
        # offset is page #
        # value is page limit
        if cursor is None:
            cursor = Cursor(0, 0, 0)

        limit = min(limit, self.max_limit)

        # Adjust the initial offset and stop based on the cursor and limit
        queryset = self.queryset

        page = cursor.offset
        offset = cursor.offset * cursor.value
        stop = offset + (cursor.value or limit) + 1

        if self.max_offset is not None and offset >= self.max_offset:
            raise BadPaginationError("Pagination offset too large")
        if offset < 0:
            raise BadPaginationError("Pagination offset cannot be negative")

        # Create a window of row numbers partitioned per group
        queryset = queryset.annotate(
            row_number=Window(
                expression=RowNumber(),
                partition_by=[F(self.group_by_field_name)],
                order_by=(
                    (
                        F(*self.key).desc(nulls_last=True)  # order by desc if desc is set
                        if self.desc
                        else F(*self.key).asc(nulls_last=True)  # order by asc otherwise
                    ),
                    "-created_at",
                ),
            )
        )
        # Filter the results by row number
        results = queryset.filter(
            row_number__gt=offset, row_number__lt=stop
        ).order_by(
            (
                F(*self.key).desc(nulls_last=True)
                if self.desc
                else F(*self.key).asc(nulls_last=True)
            ),
            F("created_at").desc(),
        )

        # Adjust cursors based on the grouped results for pagination
        next_cursor = Cursor(
            limit,
            page + 1,
            False,
            queryset.filter(row_number__gte=stop).exists(),
        )
        prev_cursor = Cursor(
            limit,
            page - 1,
            True,
            page > 0,
        )

        # Count the queryset
        count = queryset.count()

        # max_hits is driven by the largest group, since every page shows up
        # to `limit` rows from each group at once
        if results:
            max_hits = math.ceil(
                queryset.values(self.group_by_field_name)
                .annotate(
                    count=Count(
                        "id",
                        filter=self.count_filter,
                        distinct=True,
                    )
                )
                .order_by("-count")[0]["count"]
                / limit
            )
        else:
            max_hits = 0
        return CursorResult(
            results=results,
            next=next_cursor,
            prev=prev_cursor,
            hits=count,
            max_hits=max_hits,
        )
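The window function is the core idea: number the rows inside each group (RowNumber over PARTITION BY the group field), then one page is simply the rows with offset < row_number < stop from every group at once. A pure-Python simulation with invented rows:

from collections import defaultdict

rows = [("g1", "a"), ("g1", "b"), ("g1", "c"), ("g2", "x"), ("g2", "y")]

numbered = []
seen = defaultdict(int)
for group, value in rows:          # rows are already ordered
    seen[group] += 1               # row_number restarts inside each group
    numbered.append((group, value, seen[group]))

offset, stop = 0, 3                # page 0 with limit 2
page = [r for r in numbered if offset < r[2] < stop]
print(page)
# [('g1', 'a', 1), ('g1', 'b', 2), ('g2', 'x', 1), ('g2', 'y', 2)]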
    def __get_total_queryset(self):
        # Get total queryset
        return (
            self.queryset.values(self.group_by_field_name)
            .annotate(
                count=Count(
                    "id",
                    filter=self.count_filter,
                    distinct=True,
                )
            )
            .order_by()
        )

    def __get_total_dict(self):
        # Convert the totals into a dictionary keyed by group name
        total_group_dict = {}
        for group in self.__get_total_queryset():
            total_group_dict[str(group.get(self.group_by_field_name))] = (
                total_group_dict.get(
                    str(group.get(self.group_by_field_name)), 0
                )
                + (1 if group.get("count") == 0 else group.get("count"))
            )

        return total_group_dict

    def __get_field_dict(self):
        # Create a field dictionary
        total_group_dict = self.__get_total_dict()
        return {
            str(field): {
                "results": [],
                "total_results": total_group_dict.get(str(field), 0),
            }
            for field in self.group_by_fields
        }

    def __result_already_added(self, result, group):
        # Check whether the result has already been added to the group
        for existing_issue in group:
            if existing_issue["id"] == result["id"]:
                return True
        return False

    def __query_multi_grouper(self, results):
        # Grouping for m2m values
        total_group_dict = self.__get_total_dict()

        # Preparing a dict to keep track of group IDs associated with each result ID
        result_group_mapping = defaultdict(set)
        # Preparing a dict to group results by group ID
        grouped_by_field_name = defaultdict(list)

        # Iterate over results to fill the above dictionaries
        for result in results:
            result_id = result["id"]
            group_id = result[self.group_by_field_name]
            result_group_mapping[str(result_id)].add(str(group_id))

        # Adding group_ids key to each issue and grouping by group_name
        for result in results:
            result_id = result["id"]
            group_ids = list(result_group_mapping[str(result_id)])
            result[self.FIELD_MAPPER.get(self.group_by_field_name)] = (
                [] if "None" in group_ids else group_ids
            )
            # If a result belongs to multiple groups, add it to each group
            for group_id in group_ids:
                if not self.__result_already_added(
                    result, grouped_by_field_name[group_id]
                ):
                    grouped_by_field_name[group_id].append(result)

        # Convert grouped_by_field_name back to a list for each group
        processed_results = {
            str(group_id): {
                "results": issues,
                "total_results": total_group_dict.get(str(group_id)),
            }
            for group_id, issues in grouped_by_field_name.items()
        }

        return processed_results
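The m2m grouping deals with the fact that the queryset repeats each issue once per label (or assignee, or module). The rows are first folded into an issue-to-group-ids mapping, then each issue is appended to every group bucket exactly once. A pure-Python sketch with invented rows:

from collections import defaultdict

rows = [
    {"id": "i1", "labels__id": "l1"},
    {"id": "i1", "labels__id": "l2"},
    {"id": "i2", "labels__id": None},
]

result_group_mapping = defaultdict(set)
for row in rows:
    result_group_mapping[row["id"]].add(str(row["labels__id"]))

grouped = defaultdict(list)
for row in rows:
    group_ids = result_group_mapping[row["id"]]
    # unlabelled issues get an empty list and land in the "None" bucket
    row["label_ids"] = [] if "None" in group_ids else sorted(group_ids)
    for group_id in group_ids:
        if row["id"] not in [r["id"] for r in grouped[group_id]]:
            grouped[group_id].append(row)

print(sorted(grouped))  # ['None', 'l1', 'l2']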
    def __query_grouper(self, results):
        # Grouping for single values
        processed_results = self.__get_field_dict()
        for result in results:
            group_value = str(result.get(self.group_by_field_name))
            if group_value in processed_results:
                processed_results[str(group_value)]["results"].append(result)

        return processed_results

    def process_results(self, results):
        # Process results
        if results:
            if self.group_by_field_name in self.FIELD_MAPPER:
                processed_results = self.__query_multi_grouper(results=results)
            else:
                processed_results = self.__query_grouper(results=results)
        else:
            processed_results = {}
        return processed_results
class SubGroupedOffsetPaginator(OffsetPaginator):
    FIELD_MAPPER = {
        "labels__id": "label_ids",
        "assignees__id": "assignee_ids",
        "modules__id": "module_ids",
    }

    def __init__(
        self,
        queryset,
        group_by_field_name,
        sub_group_by_field_name,
        group_by_fields,
        sub_group_by_fields,
        count_filter,
        *args,
        **kwargs,
    ):
        super().__init__(queryset, *args, **kwargs)
        self.group_by_field_name = group_by_field_name
        self.group_by_fields = group_by_fields
        self.sub_group_by_field_name = sub_group_by_field_name
        self.sub_group_by_fields = sub_group_by_fields
        self.count_filter = count_filter

    def get_result(self, limit=30, cursor=None):
        # offset is page #
        # value is page limit
        if cursor is None:
            cursor = Cursor(0, 0, 0)

        limit = min(limit, self.max_limit)

        # Adjust the initial offset and stop based on the cursor and limit
        queryset = self.queryset

        page = cursor.offset
        offset = cursor.offset * cursor.value
        stop = offset + (cursor.value or limit) + 1

        if self.max_offset is not None and offset >= self.max_offset:
            raise BadPaginationError("Pagination offset too large")
        if offset < 0:
            raise BadPaginationError("Pagination offset cannot be negative")

        # Create windows partitioned by both group and sub group field name
        queryset = queryset.annotate(
            row_number=Window(
                expression=RowNumber(),
                partition_by=[
                    F(self.group_by_field_name),
                    F(self.sub_group_by_field_name),
                ],
                order_by=(
                    (
                        F(*self.key).desc(nulls_last=True)
                        if self.desc
                        else F(*self.key).asc(nulls_last=True)
                    ),
                    "-created_at",
                ),
            )
        )

        # Filter the results
        results = queryset.filter(
            row_number__gt=offset, row_number__lt=stop
        ).order_by(
            (
                F(*self.key).desc(nulls_last=True)
                if self.desc
                else F(*self.key).asc(nulls_last=True)
            ),
            F("created_at").desc(),
        )

        # Adjust cursors based on the grouped results for pagination
        next_cursor = Cursor(
            limit,
            page + 1,
            False,
            queryset.filter(row_number__gte=stop).exists(),
        )
        prev_cursor = Cursor(
            limit,
            page - 1,
            True,
            page > 0,
        )

        # Count the queryset
        count = queryset.count()

        # max_hits is again driven by the largest group
        if results:
            max_hits = math.ceil(
                queryset.values(self.group_by_field_name)
                .annotate(
                    count=Count(
                        "id",
                        filter=self.count_filter,
                        distinct=True,
                    )
                )
                .order_by("-count")[0]["count"]
                / limit
            )
        else:
            max_hits = 0
        return CursorResult(
            results=results,
            next=next_cursor,
            prev=prev_cursor,
            hits=count,
            max_hits=max_hits,
        )

    def __get_group_total_queryset(self):
        # Get group totals
        return (
            self.queryset.order_by(self.group_by_field_name)
            .values(self.group_by_field_name)
            .annotate(
                count=Count(
                    "id",
                    filter=self.count_filter,
                    distinct=True,
                )
            )
            .distinct()
        )

    def __get_subgroup_total_queryset(self):
        # Get subgroup totals
        return (
            self.queryset.values(
                self.group_by_field_name, self.sub_group_by_field_name
            )
            .annotate(
                count=Count("id", filter=self.count_filter, distinct=True)
            )
            .order_by()
            .values(
                self.group_by_field_name, self.sub_group_by_field_name, "count"
            )
        )

    def __get_total_dict(self):
        # Convert the totals into nested dictionaries keyed by group and subgroup
        total_group_dict = {}
        total_sub_group_dict = {}
        for group in self.__get_group_total_queryset():
            total_group_dict[str(group.get(self.group_by_field_name))] = (
                total_group_dict.get(
                    str(group.get(self.group_by_field_name)), 0
                )
                + (1 if group.get("count") == 0 else group.get("count"))
            )

        # Sub group total values
        for item in self.__get_subgroup_total_queryset():
            group = str(item[self.group_by_field_name])
            subgroup = str(item[self.sub_group_by_field_name])
            count = item["count"]

            if group not in total_sub_group_dict:
                total_sub_group_dict[str(group)] = {}

            total_sub_group_dict[group][subgroup] = count

        return total_group_dict, total_sub_group_dict

    def __get_field_dict(self):
        total_group_dict, total_sub_group_dict = self.__get_total_dict()

        return {
            str(group): {
                "results": {
                    str(sub_group): {
                        "results": [],
                        "total_results": total_sub_group_dict.get(
                            str(group)
                        ).get(str(sub_group), 0),
                    }
                    for sub_group in total_sub_group_dict.get(str(group), [])
                },
                "total_results": total_group_dict.get(str(group), 0),
            }
            for group in self.group_by_fields
        }
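For reference, the nested payload shape __get_field_dict produces, shown with hypothetical ids and totals: one entry per group, each holding a sub-group map plus running totals, ready for results to be appended into.

processed_results = {
    "state-1": {
        "results": {
            "urgent": {"results": [], "total_results": 3},
            "high": {"results": [], "total_results": 1},
        },
        "total_results": 4,
    },
    "state-2": {
        "results": {},
        "total_results": 0,
    },
}
print(processed_results["state-1"]["results"]["urgent"]["total_results"])  # 3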
    def __query_multi_grouper(self, results):
        # Multi grouper
        processed_results = self.__get_field_dict()
        # Preparing dicts to track group and sub group IDs for each result ID
        result_group_mapping = defaultdict(set)
        result_sub_group_mapping = defaultdict(set)

        # Iterate over results to fill the above dictionaries
        if self.group_by_field_name in self.FIELD_MAPPER:
            for result in results:
                result_id = result["id"]
                group_id = result[self.group_by_field_name]
                result_group_mapping[str(result_id)].add(str(group_id))

        # Use the same calculation for the sub group
        if self.sub_group_by_field_name in self.FIELD_MAPPER:
            for result in results:
                result_id = result["id"]
                sub_group_id = result[self.sub_group_by_field_name]
                result_sub_group_mapping[str(result_id)].add(str(sub_group_id))

        # Iterate over results
        for result in results:
            result_id = result["id"]
            # Get the group value
            group_value = str(result.get(self.group_by_field_name))
            # Get the sub group value
            sub_group_value = str(result.get(self.sub_group_by_field_name))
            if (
                group_value in processed_results
                and sub_group_value
                in processed_results[str(group_value)]["results"]
            ):
                if self.group_by_field_name in self.FIELD_MAPPER:
                    # for multi grouping on the group field
                    group_ids = list(result_group_mapping[str(result_id)])
                    result[self.FIELD_MAPPER.get(self.group_by_field_name)] = (
                        [] if "None" in group_ids else group_ids
                    )
                if self.sub_group_by_field_name in self.FIELD_MAPPER:
                    # for multi grouping on the sub group field
                    sub_group_ids = list(
                        result_sub_group_mapping[str(result_id)]
                    )
                    result[
                        self.FIELD_MAPPER.get(self.sub_group_by_field_name)
                    ] = [] if "None" in sub_group_ids else sub_group_ids

                processed_results[str(group_value)]["results"][
                    str(sub_group_value)
                ]["results"].append(result)

        return processed_results

    def __query_grouper(self, results):
        # Single grouper
        processed_results = self.__get_field_dict()
        for result in results:
            group_value = str(result.get(self.group_by_field_name))
            sub_group_value = str(result.get(self.sub_group_by_field_name))
            processed_results[group_value]["results"][sub_group_value][
                "results"
            ].append(result)

        return processed_results

    def process_results(self, results):
        if results:
            if (
                self.group_by_field_name in self.FIELD_MAPPER
                or self.sub_group_by_field_name in self.FIELD_MAPPER
            ):
                processed_results = self.__query_multi_grouper(results=results)
            else:
                processed_results = self.__query_grouper(results=results)
        else:
            processed_results = {}
        return processed_results
class BasePaginator:
    """BasePaginator can be inherited by any view to return a paginated response"""

@@ -171,6 +724,11 @@ class BasePaginator:
        cursor_cls=Cursor,
        extra_stats=None,
        controller=None,
        group_by_field_name=None,
        group_by_fields=None,
        sub_group_by_field_name=None,
        sub_group_by_fields=None,
        count_filter=None,
        **paginator_kwargs,
    ):
        """Paginate the request"""
@@ -178,15 +736,27 @@ class BasePaginator:

        # Parse the cursor from the query string, defaulting to the first page
        try:
            input_cursor = cursor_cls.from_string(
                request.GET.get(self.cursor_name, f"{per_page}:0:0"),
            )
        except ValueError:
            raise ParseError(detail="Invalid cursor parameter.")

        if not paginator:
            if group_by_field_name:
                paginator_kwargs["group_by_field_name"] = group_by_field_name
                paginator_kwargs["group_by_fields"] = group_by_fields
                paginator_kwargs["count_filter"] = count_filter

                if sub_group_by_field_name:
                    paginator_kwargs["sub_group_by_field_name"] = (
                        sub_group_by_field_name
                    )
                    paginator_kwargs["sub_group_by_fields"] = (
                        sub_group_by_fields
                    )

            paginator = paginator_cls(**paginator_kwargs)

        try:
@@ -196,12 +766,14 @@ class BasePaginator:
        except BadPaginationError:
            raise ParseError(detail="Error in parsing")

        # Serialize results according to the on_results function
        if on_results:
            results = on_results(cursor_result.results)
        else:
            results = cursor_result.results

        if group_by_field_name:
            results = paginator.process_results(results=results)

        # Add manipulation functions to the response
        if controller is not None:
            results = controller(results)
@@ -211,6 +783,9 @@ class BasePaginator:
        # Return the response
        response = Response(
            {
                "grouped_by": group_by_field_name,
                "sub_grouped_by": sub_group_by_field_name,
                "total_count": cursor_result.hits,
                "next_cursor": str(cursor_result.next),
                "prev_cursor": str(cursor_result.prev),
                "next_page_results": cursor_result.next.has_results,
@@ -60,4 +60,5 @@ zxcvbn==4.4.28
# timezone
pytz==2024.1
# jwt
PyJWT==2.8.0
@@ -1,6 +1,6 @@
 {
   "repository": "https://github.com/makeplane/plane.git",
-  "version": "0.20.0",
+  "version": "0.21.0",
   "license": "AGPL-3.0",
   "private": true,
   "workspaces": [
@@ -1,10 +0,0 @@
{
  "name": "@plane/constants",
  "version": "0.20.0",
  "private": true,
  "main": "./src/index.ts",
  "exports": {
    ".": "./src/index.ts",
    "./*": "./src/*"
  }
}
@@ -1,371 +0,0 @@
import { ReactNode } from "react";
import Link from "next/link";

export enum EPageTypes {
  PUBLIC = "PUBLIC",
  NON_AUTHENTICATED = "NON_AUTHENTICATED",
  SET_PASSWORD = "SET_PASSWORD",
  ONBOARDING = "ONBOARDING",
  AUTHENTICATED = "AUTHENTICATED",
}

export enum EAuthModes {
  SIGN_IN = "SIGN_IN",
  SIGN_UP = "SIGN_UP",
}

export enum EAuthSteps {
  EMAIL = "EMAIL",
  PASSWORD = "PASSWORD",
  UNIQUE_CODE = "UNIQUE_CODE",
}

// TODO: remove this
export enum EErrorAlertType {
  BANNER_ALERT = "BANNER_ALERT",
  INLINE_FIRST_NAME = "INLINE_FIRST_NAME",
  INLINE_EMAIL = "INLINE_EMAIL",
  INLINE_PASSWORD = "INLINE_PASSWORD",
  INLINE_EMAIL_CODE = "INLINE_EMAIL_CODE",
}

export enum EAuthenticationErrorCodes {
  // Global
  INSTANCE_NOT_CONFIGURED = "5000",
  INVALID_EMAIL = "5005",
  EMAIL_REQUIRED = "5010",
  SIGNUP_DISABLED = "5015",
  MAGIC_LINK_LOGIN_DISABLED = "5017",
  PASSWORD_LOGIN_DISABLED = "5019",
  SMTP_NOT_CONFIGURED = "5025",
  // Password strength
  INVALID_PASSWORD = "5020",
  // Sign Up
  USER_ACCOUNT_DEACTIVATED = "5019",
  USER_ALREADY_EXIST = "5030",
  AUTHENTICATION_FAILED_SIGN_UP = "5035",
  REQUIRED_EMAIL_PASSWORD_SIGN_UP = "5040",
  INVALID_EMAIL_SIGN_UP = "5045",
  INVALID_EMAIL_MAGIC_SIGN_UP = "5050",
  MAGIC_SIGN_UP_EMAIL_CODE_REQUIRED = "5055",
  // Sign In
  USER_DOES_NOT_EXIST = "5060",
  AUTHENTICATION_FAILED_SIGN_IN = "5065",
  REQUIRED_EMAIL_PASSWORD_SIGN_IN = "5070",
  INVALID_EMAIL_SIGN_IN = "5075",
  INVALID_EMAIL_MAGIC_SIGN_IN = "5080",
  MAGIC_SIGN_IN_EMAIL_CODE_REQUIRED = "5085",
  // Both Sign in and Sign up for magic
  INVALID_MAGIC_CODE = "5090",
  EXPIRED_MAGIC_CODE = "5095",
  EMAIL_CODE_ATTEMPT_EXHAUSTED = "5100",
  // OAuth
  GOOGLE_NOT_CONFIGURED = "5105",
  GITHUB_NOT_CONFIGURED = "5110",
  GOOGLE_OAUTH_PROVIDER_ERROR = "5115",
  GITHUB_OAUTH_PROVIDER_ERROR = "5120",
  // Reset Password
  INVALID_PASSWORD_TOKEN = "5125",
  EXPIRED_PASSWORD_TOKEN = "5130",
  // Change password
  INCORRECT_OLD_PASSWORD = "5135",
  MISSING_PASSWORD = "5138",
  INVALID_NEW_PASSWORD = "5140",
  // Set password
  PASSWORD_ALREADY_SET = "5145",
  // Admin
  ADMIN_ALREADY_EXIST = "5150",
  REQUIRED_ADMIN_EMAIL_PASSWORD_FIRST_NAME = "5155",
  INVALID_ADMIN_EMAIL = "5160",
  INVALID_ADMIN_PASSWORD = "5165",
  REQUIRED_ADMIN_EMAIL_PASSWORD = "5170",
  ADMIN_AUTHENTICATION_FAILED = "5175",
  ADMIN_USER_ALREADY_EXIST = "5180",
  ADMIN_USER_DOES_NOT_EXIST = "5185",
}

export type TAuthErrorInfo = {
  type: EErrorAlertType;
  code: EAuthenticationErrorCodes;
  title: string;
  message: ReactNode;
};

const errorCodeMessages: {
  [key in EAuthenticationErrorCodes]: { title: string; message: (email?: string | undefined) => ReactNode };
} = {
  // global
  [EAuthenticationErrorCodes.INSTANCE_NOT_CONFIGURED]: {
    title: `Instance not configured`,
    message: () => `Instance not configured. Please contact your administrator.`,
  },
  [EAuthenticationErrorCodes.SIGNUP_DISABLED]: {
    title: `Sign up disabled`,
    message: () => `Sign up disabled. Please contact your administrator.`,
  },
  [EAuthenticationErrorCodes.INVALID_PASSWORD]: {
    title: `Invalid password`,
    message: () => `Invalid password. Please try again.`,
  },
  [EAuthenticationErrorCodes.SMTP_NOT_CONFIGURED]: {
    title: `SMTP not configured`,
    message: () => `SMTP not configured. Please contact your administrator.`,
  },

  // email check in both sign up and sign in
  [EAuthenticationErrorCodes.INVALID_EMAIL]: {
    title: `Invalid email`,
    message: () => `Invalid email. Please try again.`,
  },
  [EAuthenticationErrorCodes.EMAIL_REQUIRED]: {
    title: `Email required`,
    message: () => `Email required. Please try again.`,
  },

  // sign up
  [EAuthenticationErrorCodes.USER_ALREADY_EXIST]: {
    title: `User already exists`,
    message: (email = undefined) => (
      <div>
        Your account is already registered.
        <Link
          className="underline underline-offset-4 font-medium hover:font-bold transition-all"
          href={`/sign-in${email ? `?email=${encodeURIComponent(email)}` : ``}`}
        >
          Sign In
        </Link>
        now.
      </div>
    ),
  },
  [EAuthenticationErrorCodes.REQUIRED_EMAIL_PASSWORD_SIGN_UP]: {
    title: `Email and password required`,
    message: () => `Email and password required. Please try again.`,
  },
  [EAuthenticationErrorCodes.AUTHENTICATION_FAILED_SIGN_UP]: {
    title: `Authentication failed`,
    message: () => `Authentication failed. Please try again.`,
  },
  [EAuthenticationErrorCodes.INVALID_EMAIL_SIGN_UP]: {
    title: `Invalid email`,
    message: () => `Invalid email. Please try again.`,
  },
  [EAuthenticationErrorCodes.MAGIC_SIGN_UP_EMAIL_CODE_REQUIRED]: {
    title: `Email and code required`,
    message: () => `Email and code required. Please try again.`,
  },
  [EAuthenticationErrorCodes.INVALID_EMAIL_MAGIC_SIGN_UP]: {
    title: `Invalid email`,
    message: () => `Invalid email. Please try again.`,
  },

  // sign in
  [EAuthenticationErrorCodes.USER_ACCOUNT_DEACTIVATED]: {
    title: `User account deactivated`,
    message: () => <div>Your account is deactivated. Contact support@plane.so.</div>,
  },
  [EAuthenticationErrorCodes.USER_DOES_NOT_EXIST]: {
    title: `User does not exist`,
    message: (email = undefined) => (
      <div>
        No account found.
        <Link
          className="underline underline-offset-4 font-medium hover:font-bold transition-all"
          href={`/${email ? `?email=${encodeURIComponent(email)}` : ``}`}
        >
          Create one
        </Link>
        to get started.
      </div>
    ),
  },
  [EAuthenticationErrorCodes.REQUIRED_EMAIL_PASSWORD_SIGN_IN]: {
    title: `Email and password required`,
    message: () => `Email and password required. Please try again.`,
  },
  [EAuthenticationErrorCodes.AUTHENTICATION_FAILED_SIGN_IN]: {
    title: `Authentication failed`,
    message: () => `Authentication failed. Please try again.`,
  },
  [EAuthenticationErrorCodes.INVALID_EMAIL_SIGN_IN]: {
    title: `Invalid email`,
    message: () => `Invalid email. Please try again.`,
  },
  [EAuthenticationErrorCodes.MAGIC_SIGN_IN_EMAIL_CODE_REQUIRED]: {
    title: `Email and code required`,
    message: () => `Email and code required. Please try again.`,
  },
  [EAuthenticationErrorCodes.INVALID_EMAIL_MAGIC_SIGN_IN]: {
    title: `Invalid email`,
    message: () => `Invalid email. Please try again.`,
  },

  // Both Sign in and Sign up
  [EAuthenticationErrorCodes.INVALID_MAGIC_CODE]: {
    title: `Authentication failed`,
    message: () => `Invalid magic code. Please try again.`,
  },
  [EAuthenticationErrorCodes.EXPIRED_MAGIC_CODE]: {
    title: `Expired magic code`,
    message: () => `Expired magic code. Please try again.`,
  },
  [EAuthenticationErrorCodes.EMAIL_CODE_ATTEMPT_EXHAUSTED]: {
    title: `Expired magic code`,
    message: () => `Expired magic code. Please try again.`,
  },

  // OAuth
  [EAuthenticationErrorCodes.GOOGLE_NOT_CONFIGURED]: {
    title: `Google not configured`,
    message: () => `Google not configured. Please contact your administrator.`,
  },
  [EAuthenticationErrorCodes.GITHUB_NOT_CONFIGURED]: {
    title: `GitHub not configured`,
    message: () => `GitHub not configured. Please contact your administrator.`,
  },
  [EAuthenticationErrorCodes.GOOGLE_OAUTH_PROVIDER_ERROR]: {
    title: `Google OAuth provider error`,
    message: () => `Google OAuth provider error. Please try again.`,
  },
  [EAuthenticationErrorCodes.GITHUB_OAUTH_PROVIDER_ERROR]: {
    title: `GitHub OAuth provider error`,
    message: () => `GitHub OAuth provider error. Please try again.`,
  },

  // Reset Password
  [EAuthenticationErrorCodes.INVALID_PASSWORD_TOKEN]: {
    title: `Invalid password token`,
    message: () => `Invalid password token. Please try again.`,
  },
  [EAuthenticationErrorCodes.EXPIRED_PASSWORD_TOKEN]: {
    title: `Expired password token`,
    message: () => `Expired password token. Please try again.`,
  },

  // Change password
  [EAuthenticationErrorCodes.MISSING_PASSWORD]: {
    title: `Password required`,
    message: () => `Password required. Please try again.`,
  },
  [EAuthenticationErrorCodes.INCORRECT_OLD_PASSWORD]: {
    title: `Incorrect old password`,
    message: () => `Incorrect old password. Please try again.`,
  },
  [EAuthenticationErrorCodes.INVALID_NEW_PASSWORD]: {
    title: `Invalid new password`,
    message: () => `Invalid new password. Please try again.`,
  },

  // set password
  [EAuthenticationErrorCodes.PASSWORD_ALREADY_SET]: {
    title: `Password already set`,
    message: () => `Password already set. Please try again.`,
  },

  // admin
  [EAuthenticationErrorCodes.ADMIN_ALREADY_EXIST]: {
    title: `Admin already exists`,
    message: () => `Admin already exists. Please try again.`,
  },
  [EAuthenticationErrorCodes.REQUIRED_ADMIN_EMAIL_PASSWORD_FIRST_NAME]: {
    title: `Email, password and first name required`,
    message: () => `Email, password and first name required. Please try again.`,
  },
  [EAuthenticationErrorCodes.INVALID_ADMIN_EMAIL]: {
    title: `Invalid admin email`,
    message: () => `Invalid admin email. Please try again.`,
  },
  [EAuthenticationErrorCodes.INVALID_ADMIN_PASSWORD]: {
    title: `Invalid admin password`,
    message: () => `Invalid admin password. Please try again.`,
  },
  [EAuthenticationErrorCodes.REQUIRED_ADMIN_EMAIL_PASSWORD]: {
    title: `Email and password required`,
    message: () => `Email and password required. Please try again.`,
  },
  [EAuthenticationErrorCodes.ADMIN_AUTHENTICATION_FAILED]: {
    title: `Authentication failed`,
    message: () => `Authentication failed. Please try again.`,
  },
  [EAuthenticationErrorCodes.ADMIN_USER_ALREADY_EXIST]: {
    title: `Admin user already exists`,
    message: () => (
      <div>
        Admin user already exists.
        <Link className="underline underline-offset-4 font-medium hover:font-bold transition-all" href={`/admin`}>
          Sign In
        </Link>
        now.
      </div>
    ),
  },
  [EAuthenticationErrorCodes.ADMIN_USER_DOES_NOT_EXIST]: {
    title: `Admin user does not exist`,
    message: () => (
      <div>
        Admin user does not exist.
        <Link className="underline underline-offset-4 font-medium hover:font-bold transition-all" href={`/admin`}>
          Sign In
        </Link>
        now.
      </div>
    ),
  },
};

export const authErrorHandler = (
  errorCode: EAuthenticationErrorCodes,
  email?: string | undefined
): TAuthErrorInfo | undefined => {
  const bannerAlertErrorCodes = [
    EAuthenticationErrorCodes.INSTANCE_NOT_CONFIGURED,
    EAuthenticationErrorCodes.INVALID_EMAIL,
    EAuthenticationErrorCodes.EMAIL_REQUIRED,
    EAuthenticationErrorCodes.SIGNUP_DISABLED,
    EAuthenticationErrorCodes.INVALID_PASSWORD,
    EAuthenticationErrorCodes.SMTP_NOT_CONFIGURED,
    EAuthenticationErrorCodes.USER_ALREADY_EXIST,
    EAuthenticationErrorCodes.AUTHENTICATION_FAILED_SIGN_UP,
    EAuthenticationErrorCodes.REQUIRED_EMAIL_PASSWORD_SIGN_UP,
    EAuthenticationErrorCodes.INVALID_EMAIL_SIGN_UP,
    EAuthenticationErrorCodes.INVALID_EMAIL_MAGIC_SIGN_UP,
    EAuthenticationErrorCodes.MAGIC_SIGN_UP_EMAIL_CODE_REQUIRED,
    EAuthenticationErrorCodes.USER_DOES_NOT_EXIST,
    EAuthenticationErrorCodes.AUTHENTICATION_FAILED_SIGN_IN,
    EAuthenticationErrorCodes.REQUIRED_EMAIL_PASSWORD_SIGN_IN,
    EAuthenticationErrorCodes.INVALID_EMAIL_SIGN_IN,
    EAuthenticationErrorCodes.INVALID_EMAIL_MAGIC_SIGN_IN,
    EAuthenticationErrorCodes.MAGIC_SIGN_IN_EMAIL_CODE_REQUIRED,
    EAuthenticationErrorCodes.INVALID_MAGIC_CODE,
    EAuthenticationErrorCodes.EXPIRED_MAGIC_CODE,
    EAuthenticationErrorCodes.EMAIL_CODE_ATTEMPT_EXHAUSTED,
    EAuthenticationErrorCodes.GOOGLE_NOT_CONFIGURED,
    EAuthenticationErrorCodes.GITHUB_NOT_CONFIGURED,
    EAuthenticationErrorCodes.GOOGLE_OAUTH_PROVIDER_ERROR,
    EAuthenticationErrorCodes.GITHUB_OAUTH_PROVIDER_ERROR,
    EAuthenticationErrorCodes.INVALID_PASSWORD_TOKEN,
    EAuthenticationErrorCodes.EXPIRED_PASSWORD_TOKEN,
    EAuthenticationErrorCodes.INCORRECT_OLD_PASSWORD,
    EAuthenticationErrorCodes.INVALID_NEW_PASSWORD,
    EAuthenticationErrorCodes.PASSWORD_ALREADY_SET,
    EAuthenticationErrorCodes.ADMIN_ALREADY_EXIST,
    EAuthenticationErrorCodes.REQUIRED_ADMIN_EMAIL_PASSWORD_FIRST_NAME,
    EAuthenticationErrorCodes.INVALID_ADMIN_EMAIL,
    EAuthenticationErrorCodes.INVALID_ADMIN_PASSWORD,
    EAuthenticationErrorCodes.REQUIRED_ADMIN_EMAIL_PASSWORD,
    EAuthenticationErrorCodes.ADMIN_AUTHENTICATION_FAILED,
    EAuthenticationErrorCodes.ADMIN_USER_ALREADY_EXIST,
    EAuthenticationErrorCodes.ADMIN_USER_DOES_NOT_EXIST,
  ];

  if (bannerAlertErrorCodes.includes(errorCode))
    return {
      type: EErrorAlertType.BANNER_ALERT,
      code: errorCode,
      title: errorCodeMessages[errorCode]?.title || "Error",
      message: errorCodeMessages[errorCode]?.message(email) || "Something went wrong. Please try again.",
    };

  return undefined;
};
@@ -1 +0,0 @@
export * from "./auth";
@@ -1,6 +1,6 @@
 {
   "name": "@plane/editor-core",
-  "version": "0.20.0",
+  "version": "0.21.0",
   "description": "Core Editor that powers Plane",
   "private": true,
   "main": "./dist/index.mjs",
@@ -112,7 +112,7 @@ export const useEditor = ({
     if (value === null || value === undefined) return;
     if (editor && !editor.isDestroyed && !editor.storage.image.uploadInProgress) {
       try {
-        editor.commands.setContent(value);
+        editor.commands.setContent(value, false, { preserveWhitespace: "full" });
         const currentSavedSelection = savedSelectionRef.current;
         if (currentSavedSelection) {
           const docLength = editor.state.doc.content.size;
@@ -147,7 +147,7 @@ export const useEditor = ({
       const item = getEditorMenuItem(itemName);
       if (item) {
         if (item.key === "image") {
-          item.command(savedSelection);
+          item.command(savedSelectionRef.current);
         } else {
           item.command();
         }
@@ -186,6 +186,7 @@ export const useEditor = ({
       if (!editorRef.current) return;
       scrollSummary(editorRef.current, marking);
     },
+    isEditorReadyToDiscard: () => editorRef.current?.storage.image.uploadInProgress === false,
     setFocusAtPosition: (position: number) => {
       if (!editorRef.current || editorRef.current.isDestroyed) {
         console.error("Editor reference is not available or has been destroyed.");
@@ -9,7 +9,7 @@ export { isCellSelection } from "src/ui/extensions/table/table/utilities/is-cell
 // utils
 export * from "src/lib/utils";
 export * from "src/ui/extensions/table/table";
-export { startImageUpload } from "src/ui/plugins/upload-image";
+export { startImageUpload } from "src/ui/plugins/image/image-upload-handler";

 // components
 export { EditorContainer } from "src/ui/components/editor-container";
@@ -1,8 +1,9 @@
 import { Editor, Range } from "@tiptap/core";
-import { startImageUpload } from "src/ui/plugins/upload-image";
+import { startImageUpload } from "src/ui/plugins/image/image-upload-handler";
 import { findTableAncestor } from "src/lib/utils";
 import { Selection } from "@tiptap/pm/state";
 import { UploadImage } from "src/types/upload-image";
+import { replaceCodeWithText } from "src/ui/extensions/code/utils/replace-code-block-with-text";

 export const setText = (editor: Editor, range?: Range) => {
   if (range) editor.chain().focus().deleteRange(range).clearNodes().run();
@@ -54,69 +55,11 @@ export const toggleUnderline = (editor: Editor, range?: Range) => {
   else editor.chain().focus().toggleUnderline().run();
 };

-const replaceCodeBlockWithContent = (editor: Editor) => {
-  try {
-    const { schema } = editor.state;
-    const { paragraph } = schema.nodes;
-    let replaced = false;
-
-    const replaceCodeBlock = (from: number, to: number, textContent: string) => {
-      const docSize = editor.state.doc.content.size;
-
-      if (from < 0 || to > docSize || from > to) {
-        console.error("Invalid range for replacement: ", from, to, "in a document of size", docSize);
-        return;
-      }
-
-      // split the textContent by new lines to handle each line as a separate paragraph
-      const lines = textContent.split(/\r?\n/);
-
-      const tr = editor.state.tr;
-
-      // Calculate the position for inserting the first paragraph
-      let insertPos = from;
-
-      // Remove the code block first
-      tr.delete(from, to);
-
-      // For each line, create a paragraph node and insert it
-      lines.forEach((line) => {
-        const paragraphNode = paragraph.create({}, schema.text(line));
-        tr.insert(insertPos, paragraphNode);
-        // Update insertPos for the next insertion
-        insertPos += paragraphNode.nodeSize;
-      });
-
-      // Dispatch the transaction
-      editor.view.dispatch(tr);
-      replaced = true;
-    };
-
-    editor.state.doc.nodesBetween(editor.state.selection.from, editor.state.selection.to, (node, pos) => {
-      if (node.type === schema.nodes.codeBlock) {
-        const startPos = pos;
-        const endPos = pos + node.nodeSize;
-        const textContent = node.textContent;
-        if (textContent.length === 0) {
-          editor.chain().focus().toggleCodeBlock().run();
-        }
-        replaceCodeBlock(startPos, endPos, textContent);
-        return false;
-      }
-    });
-
-    if (!replaced) {
-      console.log("No code block to replace.");
-    }
-  } catch (error) {
-    console.error("An error occurred while replacing code block content:", error);
-  }
-};
-
 export const toggleCodeBlock = (editor: Editor, range?: Range) => {
   try {
     // if it's a code block, replace it with the code with paragraphs
     if (editor.isActive("codeBlock")) {
-      replaceCodeBlockWithContent(editor);
+      replaceCodeWithText(editor);
       return;
     }

@@ -124,11 +67,16 @@ export const toggleCodeBlock = (editor: Editor, range?: Range) => {
     const text = editor.state.doc.textBetween(from, to, "\n");
     const isMultiline = text.includes("\n");

     // if the selection is not a range i.e. empty, then simply convert it into a code block
     if (editor.state.selection.empty) {
       editor.chain().focus().toggleCodeBlock().run();
     } else if (isMultiline) {
       // if the selection is multiline, then also replace the text content with a code block
       editor.chain().focus().deleteRange({ from, to }).insertContentAt(from, `\`\`\`\n${text}\n\`\`\``).run();
     } else {
       // if the selection is a single line, then simply convert it into inline code
       editor.chain().focus().toggleCode().run();
     }
   } catch (error) {
@@ -194,7 +142,7 @@ export const insertImageCommand = (
   if (range) editor.chain().focus().deleteRange(range).run();
   const input = document.createElement("input");
   input.type = "file";
-  input.accept = "image/*";
+  input.accept = ".jpeg, .jpg, .png, .webp, .svg";
   input.onchange = async () => {
     if (input.files?.length) {
       const file = input.files[0];
@@ -110,6 +110,11 @@ ul[data-type="taskList"] li > label input[type="checkbox"]:checked:hover {
   }
 }

+/* the p tag just after the ul tag */
+ul[data-type="taskList"] + p {
+  margin-top: 0.4rem !important;
+}
+
 ul[data-type="taskList"] li > label input[type="checkbox"] {
   position: relative;
   -webkit-appearance: none;
@@ -152,6 +157,10 @@ ul[data-type="taskList"] li > label input[type="checkbox"] {
   }
 }

+ul[data-type="taskList"] li > div > p {
+  margin-top: 10px;
+}
+
 ul[data-type="taskList"] li[data-checked="true"] > div > p {
   color: rgb(var(--color-text-400));
   text-decoration: line-through;
@@ -15,4 +15,5 @@ export interface EditorRefApi extends EditorReadOnlyRefApi {
   isMenuItemActive: (itemName: EditorMenuItemNames) => boolean;
   onStateChange: (callback: () => void) => () => void;
   setFocusAtPosition: (position: number) => void;
+  isEditorReadyToDiscard: () => boolean;
 }
@@ -33,7 +33,7 @@ export const CustomCodeInlineExtension = Mark.create<CodeOptions>({
   return {
     HTMLAttributes: {
       class:
-        "rounded bg-custom-background-80 px-1 py-[2px] font-mono font-medium text-orange-500 border-[0.5px] border-custom-border-200 text-sm",
+        "rounded bg-custom-background-80 px-1 py-[2px] font-mono font-medium text-orange-500 border-[0.5px] border-custom-border-200",
       spellcheck: "false",
     },
   };
@@ -1,3 +1,5 @@
+"use client";
+
 import { useState } from "react";
 import { NodeViewWrapper, NodeViewContent } from "@tiptap/react";
 import { common, createLowlight } from "lowlight";
Some files were not shown because too many files have changed in this diff.