Compare commits

...

154 Commits

Author SHA1 Message Date
Aaron Reisman
409b2b78f1 chore: enum to object lookups 2025-06-12 23:00:11 -07:00
sriram veeraghanta
ebc2bdcd3a feat: adding build process to logger package using tsup #7210 2025-06-13 01:50:44 +05:30
Aaron
11b222ece8 chore(deps): update TypeScript version across multiple packages to 5.8.3 (#7209) 2025-06-13 01:40:27 +05:30
JayashTripathy
c1a078ef3f [WEB-4246] Analytics minor improvements (#7194)
* chore: updated label for epics

* chore: improved export logic

* refactor: move csvConfig to export.ts and clean up export logic

* refactor: remove unused CSV export logic from WorkItemsInsightTable component

* refactor: streamline data handling in InsightTable component for improved rendering

* feat: add translation for "No. of {entity}" and update priority chart y-axis label to use new translation

* refactor: cleaned up some component and added utilitites

* feat: add "at_risk" translation to multiple languages in translations.json files

* refactor: update TrendPiece component to use new status variants for analytics

* fix: adjust TrendPiece component logic for on-track and off-track status

* refactor: use nullish coalescing operator for yAxis.dx in line and scatter charts

* feat: add "at_risk" translation to various languages in translations.json files

* feat: add "no_of" translation to various languages in translations.json files

* feat: update "at_risk" translation in Ukrainian, Vietnamese, and Chinese locales in translations.json files
2025-06-12 21:15:09 +05:30
Akshita Goyal
ad11a34efc [WEB-4236] fix: divided settings scroll for sidebar and main content (#7201)
* fix: divided settings scroll for sidebar and main content

* fix: handled icons

* fix: mobile css
2025-06-11 16:11:40 +05:30
Prateek Shourya
9c28db8b7b [WEB-4300] improvement: add allowedProjectIds to create work item modal (#7195) 2025-06-10 20:32:39 +05:30
dependabot[bot]
32d5fea3d3 chore(deps): bump requests (#7193)
Bumps the pip group with 1 update in the /apiserver/requirements directory: [requests](https://github.com/psf/requests).


Updates `requests` from 2.32.2 to 2.32.4
- [Release notes](https://github.com/psf/requests/releases)
- [Changelog](https://github.com/psf/requests/blob/main/HISTORY.md)
- [Commits](https://github.com/psf/requests/compare/v2.32.2...v2.32.4)

---
updated-dependencies:
- dependency-name: requests
  dependency-version: 2.32.4
  dependency-type: direct:production
  dependency-group: pip
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-06-10 17:39:48 +05:30
Prateek Shourya
6adc721b34 [WEB-4283] fix: update group key handling in issue store utilities for state groups (#7191)
* fix: update group key handling in issue store utilities for state groups

- Introduced a new function to determine the default group key based on the provided groupByKey.
- Updated references to use the new function for improved clarity and maintainability.
- Adjusted the mapping for "state_detail.group" in the ISSUE_GROUP_BY_KEY to ensure consistency.
- Enhanced the getArrayStringArray method to handle group values more effectively.

* refactor: clean up filters constants
2025-06-10 13:56:42 +05:30
Anmol Singh Bhatia
531748dcc3 [WEB-4288] fix: auth page tab index (#7189)
* fix: auth page tab index

* chore: code refactor
2025-06-10 01:47:59 +05:30
Sangmin Ahn
9965f48ba7 fix: prevent prematurely triggered Japanese label creation (#7084) 2025-06-09 16:07:42 +05:30
Saurabh Kumar
d15d7549f7 [SILO-303] Add external id and external source in project model #7182 2025-06-09 16:02:09 +05:30
Vamsi Krishna
8fcffd2338 [WEB-4196]fix: sub work item copy link message #7186 2025-06-09 15:46:57 +05:30
Vamsi Krishna
07e937cd8e [WEB-4094]chore: workspace notifications refactor (#7061)
* chore: workspace notifications refactor

* fix: url params

* fix: added null checks to avoid run time errors

* fix: notifications header color fix
2025-06-09 15:33:57 +05:30
Farahat Abdrabouh
1f1b421735 Docs: Correct numeric values in contributing guide #7184 2025-06-09 13:22:07 +05:30
sriram veeraghanta
5a43ec8411 chore: turbo repo version upgrade 2025-06-09 13:20:07 +05:30
sriram veeraghanta
c86e7e02bc chore: upgrade tar-fs package to fix vulnerabilities 2025-06-09 13:19:14 +05:30
sriram veeraghanta
d91d7a2f60 chore: tar-fs patch upgrade 2025-06-09 12:58:18 +05:30
sriram veeraghanta
b3b285b1e5 chore: upgrade django version to 4.2.22 2025-06-09 12:49:26 +05:30
Prateek Shourya
11debee402 fix: build errors related to project member list (#7185) 2025-06-09 00:31:27 +05:30
Vamsi Krishna
1608e4f122 [WEB-3374]feat: added merge date display (#7141)
* feat: added merge date display

* chore: moved formatter to utils

* chore: removed unwanted props
2025-06-08 23:47:08 +05:30
Vamsi Krishna
edeeee1227 [WEB-4063]chore: updated work item email template (#7044)
* chore: updated work item email template

* chore: passed dynamic value for email template

---------

Co-authored-by: NarayanBavisetti <narayan3119@gmail.com>
2025-06-08 23:46:12 +05:30
sriram veeraghanta
9ff238816b sync: canary changes to preview 2025-06-06 18:06:51 +05:30
sriram veeraghanta
6bd5caf008 chore: upgrade package version 2025-06-06 17:50:31 +05:30
sriram veeraghanta
c021aff58f chore: django version upgrade 2025-06-06 16:04:34 +05:30
sriram veeraghanta
683be55883 chore: upgrade nextjs version 2025-06-06 16:02:56 +05:30
Manish Gupta
970ce8cf26 [INFRA-183] feat: add restore-airgapped script to build workflow (#7170)
* [WEB-4260] chore: add restore-airgapped script to build workflow

* docs: update restore instructions in README for self-hosted and commercial air-gapped versions

* fix: update restore script filename and improve error handling in restore-airgapped script
2025-06-06 15:24:43 +05:30
Manish Gupta
cbbe1a4e4d refactor: Enhance backup and restore scripts for container data (#7055)
* refactor: enhance backup and restore scripts for container data management

* fix: ensure proper quoting in backup script to handle paths with spaces

* fix: ensure backup directory is only removed if tar command succeeds

* CodeRabbit fixes
2025-06-06 15:24:43 +05:30
Manish Gupta
6a74677cc9 fix: update API service startup check to use HTTP request instead of logs (#7054) 2025-06-06 15:24:43 +05:30
sriram veeraghanta
f6ea4f931d Merge branch 'canary' of github.com:makeplane/plane into preview 2025-06-06 15:23:10 +05:30
Aaryan Khandelwal
950fcfdb40 [WIKI-391] chore: handle deactivated user display name in version history #7171 2025-06-06 15:04:00 +05:30
Bavisetti Narayan
053c895120 [WEB 4252] chore: updated the favicon request for work item link (#7173)
* chore: added the favicon to link

* chore: added None validation for soup
2025-06-06 15:02:00 +05:30
Aaryan Khandelwal
245167e8aa refactor: unused components, hooks, constants (#7157)
* refactor: remove unused dashboard components and fetch keys

* refactor: remove unused hooks and wrappers

* chore: remove unused function
2025-06-06 14:09:56 +05:30
Vamsi Krishna
6be3f0ea73 [WEB-4208]chore: refactored work item quick actions (#7136)
* chore: refactored work item quick actions

* chore: update event handling for menu

* chore: reverted unwanted changes

* fix: update archive copy link

* chore: handled undefined function implementation
2025-06-06 13:21:00 +05:30
JayashTripathy
14d2d69120 [WEB-4230] refactor: Analytics code refactor, removal of nivo charts dependencies and translations (#7131)
* chore: added code split for the analytics store

* chore: done some refactor

* refactor: update entity keys in analytics and translations

* chore: updated the translations

* refactor: simplify AnalyticsStoreV2 class by removing unnecessary constructor

* feat: add AnalyticsStoreV2 class and interface for enhanced analytics functionality

* feat: enhance WorkItemsModal and analytics store with isEpic functionality

* feat: integrate isEpic state into TotalInsights and WorkItemsModal components

* refactor: remove isEpic state from WorkItemsModalMainContent component

* refactor: removed old analytics components and related services

* refactor: new analytics

* refactor: removed all nivo chart dependencies

* chore: resolved coderabbit comments

* fix: update processUrl to handle custom-work-items in peek view

* feat: implement CSV export functionality in InsightTable component

* feat: enhance analytics service with filter parameters and improve data handling in InsightTable

* feat: add new translation keys for various statuses across multiple languages

* [WEB-4246] fix: enhance analytics components to include 'isEpic' parameter for improved data fetching

* chore: update yarn.lock to remove deprecated @nivo packages and clean up unused dependencies
2025-06-06 01:53:38 +05:30
Anmol Singh Bhatia
570a9e319e [WEB-4257] chore: user profile setting options updated #7166 2025-06-06 01:47:31 +05:30
Anmol Singh Bhatia
469a027bb6 [WEB-4274] fix: metadata base url warning #7175 2025-06-05 22:51:56 +05:30
Prateek Shourya
8c99a7df88 [WEB-4273] fix: plans comparison scroll issue (#7176) 2025-06-05 22:51:05 +05:30
Prateek Shourya
f34f078bd2 [WEB-4272] fix: remove duplicate CommandPalette instances from settings layouts to prevent modal conflicts (#7174) 2025-06-05 20:48:36 +05:30
Anmol Singh Bhatia
0fe2549bc6 [WEB-4256] chore: add og image and update meta tags for social media compatibility (#7165)
* chore: og image added

* chore: meta config for cross-platform support
2025-06-05 19:32:11 +05:30
Prateek Shourya
118964de01 [WEB-4254] fix: ensure user details are available in project member details computation (#7162) 2025-06-05 19:31:07 +05:30
Manish Gupta
9f37f1ef0e [INFRA-183] feat: add restore-airgapped script to build workflow (#7170)
* [WEB-4260] chore: add restore-airgapped script to build workflow

* docs: update restore instructions in README for self-hosted and commercial air-gapped versions

* fix: update restore script filename and improve error handling in restore-airgapped script
2025-06-05 17:27:57 +05:30
Prateek Shourya
986f29d1f2 [WEB-4253] improvement: plan card enhancements (#7168)
* [WEB-4253] improvement: plan card enhancements

* improvement: pricing changes
2025-06-05 14:37:26 +05:30
Aaryan Khandelwal
1113f9fc19 [WIKI-412] regression: drop plugin logic #7161 2025-06-04 19:07:49 +05:30
Prateek Shourya
ef3ec7274c [WEB-4253] improvement: minor enhancements to billing page (#7160) 2025-06-04 17:29:45 +05:30
Akshita Goyal
a0a45b7916 [WEB-4249] fix: settings header css + cta on error page + project member list (#7159)
* fix: settings header css + cta on error page

* [WEB-4249] fix: filter out inactive workspace members from project member list

---------

Co-authored-by: Prateek Shourya <prateekshourya29@gmail.com>
2025-06-04 16:38:35 +05:30
Aaryan Khandelwal
2792d48288 [WIKI-412] chore: improved rich text editor extensions handling (#7158)
* chore: code split for rich text editor extensions

* chore: update type

* chore: add missing prop
2025-06-04 15:32:54 +05:30
Anmol Singh Bhatia
b2ccca0567 [WEB-3931] chore: maintenance page ux copy (#7135)
* chore: maintenance ux copy translation added

* chore: maintenance ux copy updated

* chore: code refactor
2025-06-04 13:37:58 +05:30
Prateek Shourya
2e822b38e4 [WEB-4240] chore: bump local db version to 1.3 #7154 2025-06-04 13:01:29 +05:30
JayashTripathy
e570fe404f [WEB-4182] Fix work item links error messages (#7122)
* fix: backend error message toast when getting error

* fix: toast in small screens
2025-06-03 22:18:26 +05:30
Aaryan Khandelwal
48b613ae66 [WIKI-410] chore: editor translation files #7156 2025-06-03 22:13:56 +05:30
Prateek Shourya
e70105235b [WEB-4245] improvement: minor enhancements to project members settings page (#7153) 2025-06-03 15:09:54 +05:30
Nikhil
7766e8b5cf [WEB-3998]: clean up imports and remove cache decorators in workspace views to avoid stale data on browser cache #7150 2025-06-03 13:36:52 +05:30
Akshita Goyal
16d63abcdc [WEB-3998] fix: minor empty states changes + refactoring (#7151) 2025-06-02 15:50:57 +05:30
M. Palanikannan
0568b8d583 regression: building utils back to run live server (#7149) 2025-06-02 13:32:34 +05:30
Quang Hung Pham
64da29b0d9 chore: add select all/deselect all functionality when adding existing work item (#7045)
* chore: add select all/deselect all functionality

* chore: update button display logic by CR
2025-06-02 13:30:31 +05:30
Zero King
7c336a65c4 build: add .venv to .dockerignore (#7146) 2025-05-31 12:32:25 +05:30
sriram veeraghanta
2242a85e5c chore: nextjs upgrade 2025-05-30 21:12:02 +05:30
Aaryan Khandelwal
323920a358 [WIKI-399] fix: add favorite action to page header #7144 2025-05-30 20:58:46 +05:30
Aaryan Khandelwal
151fc8389e [WIKI-181] chore: asset check endpoint added #7140 2025-05-30 20:58:06 +05:30
sriram veeraghanta
0f828fd5e0 chore: core component fixes 2025-05-30 20:57:35 +05:30
Prateek Shourya
67cbe94d4a [WEB-3964] refactor: permission layer (#7094)
* refactor: permission layer

* refactor: add original_role to project member serializer

* chore: minor fixes related to permission layer

* fix: strict type checking while checking user permissions
2025-05-30 19:57:07 +05:30
sriram veeraghanta
322af8c436 [WEB-4223] fix: remove build process from utils package #7138 2025-05-30 18:48:18 +05:30
Sangeetha
41c2aefad4 [WEB-3998] feat: settings page revamp (#6959)
* chore: return workspace name and logo in profile settings api

* chore: remove unwanted fields

* fix: backend

* feat: workspace settings

* feat: workspace settings + layout

* feat: profile + workspace settings ui

* chore: project settings + refactoring

* routes

* fix: handled no project

* fix: css + build

* feat: profile settings internal screens upgrade

* fix: workspace settings internal screens

* fix: external scrolling allowed

* fix: css

* fix: css

* fix: css

* fix: preferences settings

* fix: css

* fix: mobile interface

* fix: profile redirections

* fix: dark theme

* fix: css

* fix: css

* feat: scroll

* fix: refactor

* fix: bug fixes

* fix: refactor

* fix: css

* fix: routes

* fix: first day of the week

* fix: scrolling

* fix: refactoring

* fix: project -> projects

* fix: refactoring

* fix: refactor

* fix: no authorized view consistency

* fix: folder structure

* fix: revert

* fix: handled redirections

* fix: scroll

* fix: deleted old routes

* fix: empty states

* fix: headings

* fix: settings description

* fix: build

---------

Co-authored-by: gakshita <akshitagoyal1516@gmail.com>
Co-authored-by: Akshita Goyal <36129505+gakshita@users.noreply.github.com>
2025-05-30 18:47:33 +05:30
sriram veeraghanta
445c819fbd [WEB-4172] feat: Crawl work item links for title and favicon (#7117)
* feat: added a python bg task to crawl work item links for title and description

* fix: return meta_data in the response

* fix: add validation for accessing IP ranges

* fix: remove json.dumps

* fix: handle exception by returning None

* refactor: call find_favicon_url inside fetch_and_encode_favicon function

* chore: type hints

* fix: Handle None

* fix: remove print statements

* chore: added favicon and title of links

* fix: return null if no title found

* Update apiserver/plane/bgtasks/work_item_link_task.py

Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>

* fix: remove exception handling

* fix: reduce timeout seconds

* fix: handle timeout exception

* fix: remove request timeout handling

* feat: add Link icon to issue detail links and update rendering logic

* fix: use logger for exception

---------

Co-authored-by: sangeethailango <sangeethailango21@gmail.com>
Co-authored-by: JayashTripathy <jayashtripathy371@gmail.com>
Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
2025-05-30 18:44:53 +05:30
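
The [WEB-4172] entry above outlines the crawling pipeline: a Celery background task fetches the linked page, validates the target against private IP ranges (the SSRF guard mentioned in the commits), extracts the title with BeautifulSoup (hence the None checks "for soup"), and encodes the favicon. A minimal sketch of that flow follows; the task name and `(id, url)` signature match the `crawl_work_item_link_title.delay(...)` call sites in the diff further below, while the helper names and the return shape are illustrative assumptions, not the actual implementation.

```python
# Sketch only: helper names and return shape are assumptions.
import base64
import ipaddress
import socket
from urllib.parse import urljoin, urlparse

import requests
from bs4 import BeautifulSoup
from celery import shared_task


def is_private_address(url):
    # Validation for accessing IP ranges: resolve the host and refuse
    # loopback/private/reserved addresses to avoid SSRF.
    host = urlparse(url).hostname
    if host is None:
        return True
    try:
        ip = ipaddress.ip_address(socket.gethostbyname(host))
    except (socket.gaierror, ValueError):
        return True
    return ip.is_private or ip.is_loopback or ip.is_reserved


@shared_task
def crawl_work_item_link_title(link_id, url):
    if is_private_address(url):
        return None
    try:
        response = requests.get(url, timeout=5)
    except requests.RequestException:
        return None  # handle timeout/connection errors by returning None
    soup = BeautifulSoup(response.text, "html.parser")
    # "None validation for soup": pages may have no <title> tag at all.
    title = soup.title.string if soup.title and soup.title.string else None
    favicon = None
    icon_tag = soup.find("link", rel=lambda r: r and "icon" in r)
    if icon_tag and icon_tag.get("href"):
        try:
            icon_resp = requests.get(urljoin(url, icon_tag["href"]), timeout=5)
            favicon = base64.b64encode(icon_resp.content).decode("utf-8")
        except requests.RequestException:
            favicon = None
    # Persisting title/favicon onto the IssueLink's meta_data is omitted here.
    return {"title": title, "favicon": favicon}
```
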
Aaryan Khandelwal
046a8a1bcf [WEB-4189] chore: add tailwind container-queries plugin #7125 2025-05-30 18:41:12 +05:30
Akshita Goyal
099a1cc12b [WEB-3863] fix: links error handling #7126 2025-05-30 18:24:01 +05:30
Sangeetha
a0a697401b [WEB-3787] fix: project joining date (#7127)
* fix: return project joining date

* fix: added project's joining date

* fix: set created_at as read_only_fields

---------

Co-authored-by: gakshita <akshitagoyal1516@gmail.com>
2025-05-30 18:23:19 +05:30
Aaryan Khandelwal
cb92108bf4 [WEB-4197] chore: auth forms semantics and accessibility #7128 2025-05-30 18:22:20 +05:30
Aaryan Khandelwal
01b685ea57 [WIKI-181] refactor: invalid file handling #7139 2025-05-30 18:18:05 +05:30
Vipin Chaudhary
b16a585102 [WIKI-343] [WIKI-312] Fix: html characters (#7049)
* fix: handle symbols and space

* chore: refactor
2025-05-30 18:17:03 +05:30
sriram veeraghanta
4a97d7c28c fix: adding url validations for workspace name and user name 2025-05-29 17:53:48 +05:30
Aaryan Khandelwal
141cb17e8a fix: Optimize image uploads in Editor (#7129)
* fix: memoize file upload functions

* chore: update extension name

* chore: update notation

* chore: resolve chokidar package

* fix: spelling mistakes
2025-05-28 19:03:14 +05:30
sriram veeraghanta
26b62c4a70 fix: tsup version 8.4.0 2025-05-28 02:17:23 +05:30
Aaryan Khandelwal
e388a9a279 [WIKI-181] refactor: file plugins and types (#7074)
* refactor: file plugins and types

* refactor: image extension storage types

* chore: update meta tag name

* chore: extension fileset storage key

* fix: build errors

* refactor: utility extension

* refactor: file plugins

* chore: remove standalone plugin extensions

* chore: refactoring out onCreate into a common utility

* refactor: work item embed extension

* chore: use extension enums

* fix: errors and warnings

* refactor: rename extension files

* fix: tsup reloading issue

* fix: image upload types and heading types

* fix: file plugin object reference

* fix: isEditable is hard-coded

* fix: image extension names

* fix: collaborative editor editable value

* chore: add constants for editor meta as well

---------

Co-authored-by: Palanikannan M <akashmalinimurugu@gmail.com>
2025-05-28 01:43:01 +05:30
Aaryan Khandelwal
a3a580923c [WEB-4166] chore: projects app sidebar accessibility (#7115)
* chore: add ARIA attributes

* chore: add missing translations

* chore: add accessibility translations for multiple languages and configured store according to it

* chore: refactor translation file handling and introduce TranslationFiles enum

* fix: accessibility issues in workspace sidebar

---------

Co-authored-by: JayashTripathy <jayashtripathy371@gmail.com>
Co-authored-by: Prateek Shourya <prateekshourya29@gmail.com>
2025-05-28 00:58:22 +05:30
Akshita Goyal
b4bc49971c [WEB-4130] fix: cycle charts minor optimizations (#7123) 2025-05-28 00:54:21 +05:30
dependabot[bot]
04c7c53e09 chore(deps): bump requests (#7120)
Bumps the pip group with 1 update in the /apiserver/requirements directory: [requests](https://github.com/psf/requests).


Updates `requests` from 2.31.0 to 2.32.2
- [Release notes](https://github.com/psf/requests/releases)
- [Changelog](https://github.com/psf/requests/blob/main/HISTORY.md)
- [Commits](https://github.com/psf/requests/compare/v2.31.0...v2.32.2)

---
updated-dependencies:
- dependency-name: requests
  dependency-version: 2.32.2
  dependency-type: direct:production
  dependency-group: pip
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-05-26 19:45:15 +05:30
Dheeraj Kumar Ketireddy
78cc32765b [WEB-3707] pytest based test suite for apiserver (#7010)
* pytest based tests for apiserver

* Trimmed spaces

* Updated .gitignore for pytest local files
2025-05-26 15:26:26 +05:30
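
A pytest-based suite like the one described above typically drives the API through DRF's test client. A minimal sketch, assuming pytest-django is configured for the apiserver; the endpoint path and fixture are hypothetical, not taken from the actual suite:

```python
# Hypothetical example of the testing style this suite enables.
import pytest
from rest_framework.test import APIClient


@pytest.fixture
def api_client():
    return APIClient()


@pytest.mark.django_db
def test_unauthenticated_request_is_rejected(api_client):
    # Any authenticated-only endpoint should turn anonymous callers away.
    response = api_client.get("/api/users/me/")  # illustrative path
    assert response.status_code in (401, 403)
```
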
JayashTripathy
4e485d6402 [WEB-4160] fix: close the context menu after select #7113 2025-05-26 15:24:13 +05:30
JayashTripathy
5a208cb1b9 [WEB-2403] fix: alignment of project states in collapsed view #7114 2025-05-26 15:23:39 +05:30
JayashTripathy
0eafbb698a [WEB-3494] fix: size of created at value #7112 2025-05-26 15:22:16 +05:30
sriram veeraghanta
193ae9bfc8 fix: yarn lock file 2025-05-26 14:58:26 +05:30
Vamsi Krishna
7cb5a9120a [WEB-4173]fix: fixed layout overflow issue #7119 2025-05-26 14:28:56 +05:30
Vamsi Krishna
84fc81dd98 [WEB-4118]fix: adjusted sub work item properties for a better visibility (#7079)
* fix: adjusted sub work item properties for a better visibility

* fix: removed projects from sub work item filters
2025-05-23 16:14:35 +05:30
JayashTripathy
2d0c0c7f8a [WEB-4115] fix: update issue count status query to handle null values #7080 2025-05-23 16:13:48 +05:30
JayashTripathy
5c9bdb1cea [WEB-4133] fix: analytics release bugs (#7086)
* fix: header text of insight table search

* fix: made the active project list scrollable

* chore: added xAxis label to table header

* chore: removed the intake issues

* fix: made the headerText necessary

---------

Co-authored-by: NarayanBavisetti <narayan3119@gmail.com>
Co-authored-by: sriram veeraghanta <veeraghanta.sriram@gmail.com>
2025-05-23 16:13:09 +05:30
Aaron Heckmann
f8ca1e46b1 [WEB-4098] feat: noindex/nofollow (#7088)
* feat: noindex/nofollow

- On login: nofollow
- On app pages: noindex, nofollow

https://app.plane.so/plane/browse/WEB-4098/

- https://nextjs.org/docs/app/api-reference/file-conventions/layout
- https://nextjs.org/docs/app/building-your-application/routing/route-groups#creating-multiple-root-layouts
- https://nextjs.org/docs/app/api-reference/functions/generate-metadata#link-relpreload

* chore: address PR feedback
2025-05-23 16:12:04 +05:30
Vamsi Krishna
a3b9152a9b [WEB-4123]feat: language support for sub-work item empty states #7092 2025-05-23 15:36:47 +05:30
Aaryan Khandelwal
5223bd01e8 [WEB-4153] chore: extend custom font family in tailwind config (#7093)
* chore: remove unwanted font family

* chore: add font family to extend object
2025-05-23 15:35:47 +05:30
Aaryan Khandelwal
6eb0b5ddb0 [WEB-4137] chore: restrict SVG file selection (#7095)
* chore: update accepted file mime types

* chore: update accepted file mime types
2025-05-23 15:33:56 +05:30
Anmol Singh Bhatia
cd200169b6 [WEB-4107] chore: redirect user to the newly created project view after creation #7098 2025-05-23 15:32:41 +05:30
Nikhil
037bb88b53 [WEB-4144] fix: api logger to handle content decode errors #7099 2025-05-23 15:31:40 +05:30
Bavisetti Narayan
643390e723 [WEB-4145] chore: added validation for project deletion #7101 2025-05-23 15:30:42 +05:30
Aaryan Khandelwal
731c4e8fcd [WEB-4161] fix: eslint config for library config file #7103 2025-05-23 15:29:37 +05:30
Prateek Shourya
6216ad77f4 [WEB-4146] fix: AI environment variables configuration in GodMode (#7104)
* [WEB-4146] fix: artificial intelligence environment variables configuration

* chore: update llm configuration keys
2025-05-23 15:06:58 +05:30
Bavisetti Narayan
9812129ad3 [WEB-4133] chore: optimised the analytics endpoints (#7105)
* chore: optimised the analytics endpoints

* chore: segregated peek view endpoints

* chore: added analytics values validation

* chore: added project validation

* chore: reverted the changes

---------

Co-authored-by: JayashTripathy <jayashtripathy371@gmail.com>
2025-05-23 15:05:57 +05:30
JayashTripathy
5226b17f90 [WEB-4159] feat: add 'restricted_entity' translation key across multiple languages #7106 2025-05-23 15:05:37 +05:30
Vamsi Krishna
b376e5300a [WEB-3155]fix: email notification comments overflow #7110 2025-05-23 15:04:50 +05:30
Prateek Shourya
4460529b37 [WEB-4154] fix: dropdown container classname (#7085)
* fix: dropdown container classname

* improvement: update string utils for joinWithConjunction

* improvement: add more string utils
2025-05-23 13:53:16 +05:30
Nikhil
0a8cc24da5 chore: add validation fields in users (#7102)
* chore: add validation fields in users

* chore: make is email valid default value False
2025-05-21 20:34:52 +05:30
Sangeetha
2f4aa843fc [WEB-4122] fix: estimate in project export #7091 2025-05-20 12:56:30 +05:30
sriram veeraghanta
cfac8ce350 fix: ruff file formatting based on config file pyproject (#7082) 2025-05-19 17:34:46 +05:30
sriram veeraghanta
75a11ba31a fix: polynomial regular expression used on uncontrolled data (#7083)
* fix: polynomial regular expression used on uncontrolled data

* fix: optimize the function to handle both operations
2025-05-19 17:14:26 +05:30
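
The polynomial-regex fix above addresses a classic ReDoS shape: an ambiguous pattern scanned over attacker-controlled input backtracks in O(n^2) or worse. A generic illustration of the vulnerability class, not Plane's actual pattern:

```python
# re.search(r"\s+$", s) retries the match from every space in a long run of
# whitespace, so crafted input like " " * n + "x" costs O(n^2).
import re

payload = " " * 50_000 + "x"
re.search(r"\s+$", payload)  # slow: polynomial backtracking

# Linear-time rewrite without a regex:
def has_trailing_whitespace(value: str) -> bool:
    return value != value.rstrip()
```
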
sriram veeraghanta
1fc3709731 chore: Strict Null Check in Admin app (#7081)
* chore: upgrade to latest version of turbo repo

* fix: tsconfig changes

* chore: adding format script to package json

* fix: formatting of files
2025-05-19 16:25:46 +05:30
Akshita Goyal
7e21618762 [WEB-3461] fix: profile activity rendering issue (#7059)
* fix: profile activity

* fix: icon

* fix: handled conversion case

* fix: handled conversion case
2025-05-19 15:20:57 +05:30
Aaryan Khandelwal
2d475491e9 [WEB-4117] refactor: work item widgets code split (#7078)
* refactor: work item widget code split

* fix: types
2025-05-19 15:20:40 +05:30
Aaryan Khandelwal
2a2feaf88e [WIKI-181] chore: editor extension storage utility code split (#7071)
* chore: storage extension code split

* chore: use storage extension utility
2025-05-19 13:12:52 +05:30
Anmol Singh Bhatia
e48b2da623 [WEB-4056] fix: archived work item validation #7060 2025-05-18 15:28:47 +05:30
Anmol Singh Bhatia
9c9952a823 [WEB-3866] fix: work item attachment activity #7062 2025-05-18 15:28:00 +05:30
Akshita Goyal
906ce8b500 [WEB-4104] fix: project loading state #7065 2025-05-18 15:19:05 +05:30
Anmol Singh Bhatia
6c483fad2f [WEB-4041] chore: modal outside click behaviour #7072 2025-05-18 15:18:09 +05:30
Bavisetti Narayan
5b776392bd chore: revamped the analytics for cycle and module in peek view. (#7075)
* chore: added cycles and modules in analytics peek view

* chore: added cycles and modules analytics

* chore: added project filter for work items

* chore: added a peekview flag and based on that table columns

* chore: added peek view

* chore: added check for display name

* chore: cleaned up some code

* chore: fixed export csv data

* chore: added distinct work items

* chore: assignee in peek view

* updated csv fields

* chore: updated workitems peek with assignee

* fix: removed type assertions for workspaceSlug

* chore: added day wise filter in cycles and modules

* chore: added extra validations

---------

Co-authored-by: JayashTripathy <jayashtripathy371@gmail.com>
2025-05-17 17:11:26 +05:30
Aaryan Khandelwal
ba158d5d6e [WEB-4109] chore: remove analytics duration filter (#7073)
* chore: remove analytics duration filter

* removed subtitle from title and date_filter from service call

* chore: removed the date filter

* bottom text of insight trend card

* chore: changed issue manager

* fix: limited items in table

* fix: removed unnecessary props from data-table

---------

Co-authored-by: JayashTripathy <jayashtripathy371@gmail.com>
Co-authored-by: NarayanBavisetti <narayan3119@gmail.com>
2025-05-16 19:16:30 +05:30
JayashTripathy
084cc75726 [WEB-4092] fix: broken detailed empty state layout #7056 2025-05-14 18:01:36 +05:30
Nikhil
534f5c7dd0 [WEB-4088] fix: issue exports when cycles are not present (#7057)
* fix: issue exports when cycles are not present

* fix: type check
2025-05-14 18:00:49 +05:30
Manish Gupta
080cf70e3f refactor: Enhance backup and restore scripts for container data (#7055)
* refactor: enhance backup and restore scripts for container data management

* fix: ensure proper quoting in backup script to handle paths with spaces

* fix: ensure backup directory is only removed if tar command succeeds

* CodeRabbit fixes
2025-05-14 12:33:53 +05:30
Manish Gupta
4c3f7f27a5 fix: update API service startup check to use HTTP request instead of logs (#7054) 2025-05-14 10:02:21 +05:30
sriram veeraghanta
803f6cc62a chore: yarn lock file updates 2025-05-13 16:20:08 +05:30
Vamsi Krishna
3a6d0c11fb fix: set accordion to expand by default (#7053) 2025-05-13 16:18:13 +05:30
JayashTripathy
75d81f9e95 [WEB-3781] Analytics page enhancements (#7005)
* chore: analytics endpoint

* added analytics v2

* updated status icons

* added area chart in workitems and en translations

* active projects

* chore: created analytics chart

* chore: validation errors

* improved radar chart, added empty states, added projects summary

* chore: added a new graph in advance analytics

* integrated priority chart

* chore: added csv exporter

* added priority dropdown

* integrated created vs resolved chart

* custom x and y axis label in bar and area chart

* added wrapper styles to legends

* added filter components

* fixed temp data imports

* integrated filters in priority charts

* added label to priority chart and updated duration filter

* refactor

* reverted to void onchange

* fixed some constant exports

* fixed type issues

* fixed some type and build issues

* chore: updated the filtering logic for analytics

* updated default value to last_30_days

* percentage value whole number and added some rules for axis options

* fixed some translations

* added - custom tick for radar, calc of insight cards, filter labels

* chore: optimised the analytics endpoint

* replaced old analytics path with the new one, updated labels of insight card, did some store fixes

* chore: updated the export request

* Enhanced ProjectSelect to support multi-select, improved state management, and optimized data fetching and component structure.

* fix: round completion percentage calculation in ActiveProjectItem

* added empty states in project insights

* Added loader and empty state in created/resolved chart

* added loaders

* added icons in filters

* added custom colors in customised charts

* cleaned up some code

* added some responsiveness

* updated translations

* updated searchbar for the table

* added work item modal in project analytics

* fixed some of the layout issues in the peek view

* chore: updated the base function for viewsets

* synced tab to url

* code cleanup

* chore: updated the export logic

* fixed project_ids filter

* added icon in projectdropdown

* updated export button position

* export csv and emptystates icons

* refactor

* code refactor

* updated loaders, moved color palette to constants, added nullish coalescing operator in necessary places

* removed unnecessary cn

* fixed formatting issues

* fixed empty project_ids in payload

* improved null checks

* optimized charts

* modified relevant variables to observable.ref

* fixed the duration type

* optimized some code

* updated query key in project-insight

* updated query key in project-insight

* updated formatting

* chore: replaced analytics route with new one and done some optimizations

* removed the old analytics

---------

Co-authored-by: NarayanBavisetti <narayan3119@gmail.com>
2025-05-12 20:50:33 +05:30
Aaryan Khandelwal
0d5c7c6653 [WEB-4051] regression: update font size of comment editor #7048 2025-05-12 19:47:44 +05:30
Anmol Singh Bhatia
079c3a3a99 [WEB-3978] chore: cmd k search result redirection improvements (#7012)
* fix: work item tab highlight

* chore: projectListOpen state and toggle method added to command palette store

* chore: openProjectAndScrollToSidebar helper function and highlight keyframes added

* chore: SidebarProjectsListItem updated

* chore: openProjectAndScrollToSidebar implementation

* chore: code refactor

* chore: code refactor

* chore: code refactor

* chore: code refactor

* chore: code refactor

* chore: code refactor

* chore: code refactor
2025-05-12 19:15:39 +05:30
Sangeetha
5f8d5ea388 [WEB-4054] chore: search-issues endpoint code refactoring (#7029)
* chore: moved some code to a separate function

* fix: function name typo
2025-05-12 19:14:10 +05:30
Anmol Singh Bhatia
8613a80b16 [WEB-3523] feat: start of week preference (#7033)
* chore: startOfWeek constant and types updated

* chore: startOfWeek updated in profile store

* chore: StartOfWeekPreference added to profile appearance settings

* chore: calendar layout startOfWeek implementation

* chore: date picker startOfWeek implementation

* chore: gantt layout startOfWeek implementation

* chore: code refactor

* chore: code refactor

* chore: code refactor
2025-05-12 19:13:39 +05:30
Aaryan Khandelwal
dc16f2862e [WIKI-181] refactor: make file handling generic in editor (#7046)
* refactor: make file handling generic

* fix: useeffect dependency array

* chore: remove mime type to extension conversion
2025-05-12 18:37:36 +05:30
Vamsi Krishna
e68d344410 [WEB-4074]fix: removed sub-work item filters at nested levels #7047 2025-05-12 18:21:05 +05:30
Aaron Heckmann
26c8cba322 [WEB-4008] fix: handle when settings are None #7016
https://app.plane.so/plane/browse/WEB-4008/
2025-05-12 13:16:30 +05:30
Bavisetti Narayan
b435ceedfc [WEB-3782] chore: analytics endpoints (#6973)
* chore: analytics endpoint

* chore: created analytics chart

* chore: validation errors

* chore: added a new graph in advance analytics

* chore: added csv exporter

* chore: updated the filtering logic for analytics

* chore: optimised the analytics endpoint

* chore: updated the base function for viewsets

* chore: updated the export logic

* chore: added type hints

* chore: added type hints
2025-05-12 13:15:17 +05:30
Sangeetha
13c46e0fdf [WEB-3987] chore: project export functionality enhancement (#7002)
* chore: comment details of work item

* chore: attachment count and attachment name

* chore: issue link and subscriber count

* chore: list of assignees

* chore: asset_url as attachment_links

* chore: code refactor

* fix: cannot export Excel

* chore: remove print statements

* fix: filtering in list

* chore: optimize attachment_count and attachment_link query

* chore: optimize fetching issue details for multiple select

* chore: use Prefetch to avoid duplicates
2025-05-09 21:09:13 +05:30
sriram veeraghanta
02bccb44d6 chore: adding robots txt file for not indexing the server 2025-05-09 21:07:24 +05:30
Surya Prashanth
b5634f5fa1 chore: add disable_auto_set_user flag on base model save method (#7041)
- when disable_auto_set_user flag is set, user fields like created_by
are derived from payload instead of crum
2025-05-09 21:05:05 +05:30
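
A sketch of how such a flag can be wired into a Django audit base model's save method; only the flag's behavior comes from the commit body above, while the model and field names are illustrative (crum refers to django-crum, the middleware that exposes the current request user):

```python
# Illustrative base model; Plane's actual class and fields may differ.
from crum import get_current_user
from django.conf import settings
from django.db import models


class AuditModel(models.Model):
    created_by = models.ForeignKey(
        settings.AUTH_USER_MODEL, null=True, on_delete=models.SET_NULL,
        related_name="%(class)s_created",
    )

    class Meta:
        abstract = True

    def save(self, *args, disable_auto_set_user=False, **kwargs):
        if not disable_auto_set_user:
            # Default path: derive the user from the request via crum.
            user = get_current_user()
            if user and not user.is_anonymous and self.created_by_id is None:
                self.created_by = user
        # When the flag is set, created_by stays whatever the payload assigned.
        super().save(*args, **kwargs)
```
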
Aaryan Khandelwal
64aae0a2ac [WEB-4051] fix: comment editor list items font size #7034 2025-05-09 18:49:43 +05:30
Henit Chobisa
a263bfc01f chore: added external id and source to page model (#7040)
* chore: added external id and source to page model

* chore: added migration

* fix: added blank field
2025-05-09 17:23:49 +05:30
Anmol Singh Bhatia
50082f0843 [WEB-4002] fix: sidebar tab highlight (#7011)
* fix: work item tab highlight

* chore: code refactor

* chore: code refactor

* chore: code refactor
2025-05-09 16:53:51 +05:30
Prateek Shourya
30db59534d [WEB-3985] feat: common postcss config and local fonts across all plane applications (#6998)
* [WEB-3985] feat: common postcss config and local fonts across all plane applications

* improvement: split fonts into a separate exports
2025-05-09 14:26:29 +05:30
Vamsi Krishna
e401c9d6e4 [WEB-4028] feat: sub work item filters and grouping (#6997)
* feat: added filters for sub issues

* feat: added list groups for sub issues

* chore: updated order for sub work item properties

* feat: filters for sub work items

* feat: added filtering and ordering at frontend

* chore: reverted backend filters

* feat: added empty states

* chore: code improvement

---------

Co-authored-by: sangeethailango <sangeethailango21@gmail.com>
2025-05-09 14:24:06 +05:30
Bavisetti Narayan
39b5736c83 [WEB-4057] chore: updated the logger for bgtasks #7025 2025-05-09 14:23:23 +05:30
Vamsi Krishna
2785419d12 [WEB-4052]fix: sub work item copy link (#7036)
* fix: sub work item copy link

* fix: copy url to clipboard
2025-05-09 14:22:34 +05:30
sriram veeraghanta
ac5b974d67 chore: Upgrade Django version to 4.2.21 2025-05-08 21:29:26 +05:30
Anmol Singh Bhatia
14ebaf0799 [WEB-3942] chore: intake url pattern (#7006)
* chore: intake url pattern updated

* chore: code refactor

* chore: removed unused components

---------

Co-authored-by: vamsikrishnamathala <matalav55@gmail.com>
2025-05-07 21:19:24 +05:30
Sangeetha
7cdb622663 [WEB-3930] chore: change source in-app to IN_APP #7008 2025-05-07 18:46:10 +05:30
JayashTripathy
855e4a3218 [WEB-4016] updated project and workitem form (#7019)
* updated project and workitem form

* added translation for other languages also

* Update packages/i18n/src/locales/zh-CN/translations.json

Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>

---------

Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
2025-05-07 18:44:04 +05:30
Anmol Singh Bhatia
d456767492 [WEB-3955] chore: work item parent select modal params #7021 2025-05-07 18:41:28 +05:30
Bavisetti Narayan
6faff1d556 [WEB-3877] fix: changed logic to calculate cycle duration (#7024)
* chore: cycle running days

* chore: removed the module filter
2025-05-07 18:40:37 +05:30
Aaryan Khandelwal
bc2936dcd3 [WEB-3906] fix: page table of contents overlapping the page content #7018 2025-05-07 00:51:51 +05:30
Aaryan Khandelwal
d366ac1581 [WEB-2508] fix: page favorite item title mutation (#7020)
* fix: remove page favorite item title fallback value

* refactor: use nullish coalescing operator
2025-05-07 00:28:43 +05:30
Nikhil
0a01e0eb41 [WEB-4013] chore: correct live url #7014 2025-05-06 01:21:53 +05:30
Nikhil
b4cc2d83fe [WEB-4014] fix: check access when duplicating pages #7015 2025-05-06 01:20:33 +05:30
Nikhil
42e2b787f0 [WEB-4013]chore: publish login and standardize urls in common settings (#7013)
* chore: handling base path and urls

* chore: uniformize urls in common settings

* correct live url

* chore: use url join to correctly join urls

---------

Co-authored-by: sriram veeraghanta <veeraghanta.sriram@gmail.com>
2025-05-05 18:58:24 +05:30
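
The URL-standardization commit above replaces ad-hoc string concatenation with proper URL joining. An illustration of why that matters, using Python's urljoin (the URLs here are made up):

```python
# Naive concatenation duplicates or drops slashes depending on the inputs.
from urllib.parse import urljoin

base = "https://app.example.com/spaces/"
print(base + "/publish")         # https://app.example.com/spaces//publish
print(urljoin(base, "publish"))  # https://app.example.com/spaces/publish

# Note urljoin treats a base without a trailing slash as a file and replaces
# its last segment:
print(urljoin("https://app.example.com/spaces", "publish"))
# -> https://app.example.com/publish
```
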
sriram veeraghanta
461e099bbc release: v0.26.0 #6962 2025-04-28 18:24:37 +05:30
sriram veeraghanta
45e25ce18b release: v0.25.3 #6788 2025-03-21 17:26:55 +05:30
sriram veeraghanta
4d88dbaf49 release: v0.25.2 (#6736) 2025-03-11 16:01:20 +05:30
sriram veeraghanta
e61ff879c4 release: v0.25.1
* fix: issue activity for project id validation (#6668)

* fix: work item attachment count mutation (#6670)

* updated the action to modify the release build assets (#6669)

* feat: russian translation (#6666)

* chore: ru translation updated (#6672)

* fix: state drop down refactor

* fix: intake work item creation refactor

* fix: cleanup for deprecated functions

* fix: date range picker on cycles and modules list (#6676)

* fix: Handled workspace switcher closing on click

* fix: replaced date range picker with date picker at some places

* chore: add common translation keys (#6688)

* chore: add missing translation keys

* chore: add russian translation keys

* fix: issue activity task (#6689)

* changed github workflow action ubuntu version to `ubuntu-22.04` (#6683)

* chore: update russian translation (#6682)

* chore: update russian translation

* chore: rename issues to work items in russian translation

* [PE-275] chore: editor line spacing variables (#6678)

* chore: variable editor line spacing

* chore: variable list spacing

---------

Co-authored-by: Aaryan Khandelwal <aaryankhandu123@gmail.com>

* [WEB-3475] fix: cycle dates dropdown (#6690)

* fix: Handled workspace switcher closing on click

* fix: Cycle date picker

* fix: Made onSelect optional in date range component

* fix: module date picker (#6691)

* fix: Handled workspace switcher closing on click

* fix: reverted module date picker changes

* chore: extended sidebar improvement (#6693)

* feat: italian translations (#6692)

* Create translations.json - ITALIAN translation (#6667)

* chore: italian translation updated

* feat: italian translation added

* fix: module end date translation

---------

Co-authored-by: Nicolas Bossi <nicolasbossi@gmail.com>
Co-authored-by: gakshita <akshitagoyal1516@gmail.com>

* fix: attachment item created by (#6695)

* fix: module flicker issue on property update (#6699)

* [WEB-3477] fix: mutation issue on moving work items for a manually ended cycle (#6696)

* fix: package version update

* fix: esbuild version fix

* fix: package license replication

* [WEB-3488] improvement: assignee validation for work item creation (#6701)

* fix: work item assignee update validation (#6704)

---------

Co-authored-by: Nikhil <118773738+pablohashescobar@users.noreply.github.com>
Co-authored-by: Anmol Singh Bhatia <121005188+anmolsinghbhatia@users.noreply.github.com>
Co-authored-by: Manish Gupta <59428681+mguptahub@users.noreply.github.com>
Co-authored-by: Nikita Mitasov <32384814+ch4og@users.noreply.github.com>
Co-authored-by: Akshita Goyal <36129505+gakshita@users.noreply.github.com>
Co-authored-by: Aaryan Khandelwal <65252264+aaryan610@users.noreply.github.com>
Co-authored-by: Akshat Jain <akshatjain9782@gmail.com>
Co-authored-by: Lakhan Baheti <94619783+1akhanBaheti@users.noreply.github.com>
Co-authored-by: Aaryan Khandelwal <aaryankhandu123@gmail.com>
Co-authored-by: Nicolas Bossi <nicolasbossi@gmail.com>
Co-authored-by: gakshita <akshitagoyal1516@gmail.com>
Co-authored-by: Prateek Shourya <prateekshourya29@gmail.com>
2025-03-05 19:15:33 +05:30
sriram veeraghanta
adeb7d977d Merge pull request #6665 from makeplane/canary
fix: package version update
2025-02-24 20:40:25 +05:30
1041 changed files with 21070 additions and 13931 deletions


@@ -2,6 +2,7 @@
 *.pyc
 .env
 venv
+.venv
 node_modules/
 **/node_modules/
 npm-debug.log
@@ -14,4 +15,4 @@ build/
 out/
 **/out/
 dist/
-**/dist/
+**/dist/


@@ -290,5 +290,6 @@ jobs:
 ${{ github.workspace }}/deploy/selfhost/setup.sh
 ${{ github.workspace }}/deploy/selfhost/swarm.sh
 ${{ github.workspace }}/deploy/selfhost/restore.sh
+${{ github.workspace }}/deploy/selfhost/restore-airgapped.sh
 ${{ github.workspace }}/deploy/selfhost/docker-compose.yml
 ${{ github.workspace }}/deploy/selfhost/variables.env

.gitignore

@@ -53,6 +53,8 @@ mediafiles
 .env
 .DS_Store
 logs/
+htmlcov/
+.coverage
 node_modules/
 assets/dist/


@@ -69,14 +69,14 @@ chmod +x setup.sh
 docker compose -f docker-compose-local.yml up
 ```
-5. Start web apps:
+4. Start web apps:
 ```bash
 yarn dev
 ```
-6. Open your browser to http://localhost:3001/god-mode/ and register yourself as instance admin
-7. Open up your browser to http://localhost:3000 then log in using the same credentials from the previous step
+5. Open your browser to http://localhost:3001/god-mode/ and register yourself as instance admin
+6. Open up your browser to http://localhost:3000 then log in using the same credentials from the previous step
 That's it! You're all set to begin coding. Remember to refresh your browser if changes don't auto-reload. Happy contributing! 🎉


@@ -26,16 +26,16 @@ export const InstanceAIForm: FC<IInstanceAIForm> = (props) => {
   formState: { errors, isSubmitting },
 } = useForm<AIFormValues>({
   defaultValues: {
-    OPENAI_API_KEY: config["OPENAI_API_KEY"],
-    GPT_ENGINE: config["GPT_ENGINE"],
+    LLM_API_KEY: config["LLM_API_KEY"],
+    LLM_MODEL: config["LLM_MODEL"],
   },
 });
 const aiFormFields: TControllerInputFormField[] = [
   {
-    key: "GPT_ENGINE",
+    key: "LLM_MODEL",
     type: "text",
-    label: "GPT_ENGINE",
+    label: "LLM Model",
     description: (
       <>
         Choose an OpenAI engine.{" "}
@@ -49,12 +49,12 @@ export const InstanceAIForm: FC<IInstanceAIForm> = (props) => {
       </a>
     </>
   ),
-  placeholder: "gpt-3.5-turbo",
-  error: Boolean(errors.GPT_ENGINE),
+  placeholder: "gpt-4o-mini",
+  error: Boolean(errors.LLM_MODEL),
   required: false,
 },
 {
-  key: "OPENAI_API_KEY",
+  key: "LLM_API_KEY",
   type: "password",
   label: "API key",
   description: (
@@ -71,7 +71,7 @@ export const InstanceAIForm: FC<IInstanceAIForm> = (props) => {
   </>
 ),
 placeholder: "sk-asddassdfasdefqsdfasd23das3dasdcasd",
-error: Boolean(errors.OPENAI_API_KEY),
+error: Boolean(errors.LLM_API_KEY),
 required: false,
 },
 ];


@@ -98,11 +98,7 @@ export const InstanceGithubConfigForm: FC<Props> = (props) => {
   key: "GITHUB_ORGANIZATION_ID",
   type: "text",
   label: "Organization ID",
-  description: (
-    <>
-      The organization github ID.
-    </>
-  ),
+  description: <>The organization github ID.</>,
   placeholder: "123456789",
   error: Boolean(errors.GITHUB_ORGANIZATION_ID),
   required: false,


@@ -10,11 +10,13 @@ type Props = {
   handleClose: () => void;
 };
-enum ESendEmailSteps {
-  SEND_EMAIL = "SEND_EMAIL",
-  SUCCESS = "SUCCESS",
-  FAILED = "FAILED",
-}
+const ESendEmailSteps = {
+  SEND_EMAIL: "SEND_EMAIL",
+  SUCCESS: "SUCCESS",
+  FAILED: "FAILED",
+} as const;
+type ESendEmailSteps = typeof ESendEmailSteps[keyof typeof ESendEmailSteps];
 const instanceService = new InstanceService();


@@ -3,18 +3,16 @@
 import { ReactNode } from "react";
 import { ThemeProvider, useTheme } from "next-themes";
 import { SWRConfig } from "swr";
-// ui
+// plane imports
 import { ADMIN_BASE_PATH, DEFAULT_SWR_CONFIG } from "@plane/constants";
 import { Toast } from "@plane/ui";
 import { resolveGeneralTheme } from "@plane/utils";
-// constants
-// helpers
 // lib
 import { InstanceProvider } from "@/lib/instance-provider";
 import { StoreProvider } from "@/lib/store-provider";
 import { UserProvider } from "@/lib/user-provider";
 // styles
-import "./globals.css";
+import "@/styles/globals.css";
 const ToastWithTheme = () => {
   const { resolvedTheme } = useTheme();


@@ -7,7 +7,7 @@ import { LogOut, UserCog2, Palette } from "lucide-react";
 import { Menu, Transition } from "@headlessui/react";
 // plane internal packages
 import { API_BASE_URL } from "@plane/constants";
-import {AuthService } from "@plane/services";
+import { AuthService } from "@plane/services";
 import { Avatar } from "@plane/ui";
 import { getFileURL, cn } from "@plane/utils";
 // hooks


@@ -16,14 +16,16 @@ import { Banner, PasswordStrengthMeter } from "@/components/common";
 const authService = new AuthService();
 // error codes
-enum EErrorCodes {
-  INSTANCE_NOT_CONFIGURED = "INSTANCE_NOT_CONFIGURED",
-  ADMIN_ALREADY_EXIST = "ADMIN_ALREADY_EXIST",
-  REQUIRED_EMAIL_PASSWORD_FIRST_NAME = "REQUIRED_EMAIL_PASSWORD_FIRST_NAME",
-  INVALID_EMAIL = "INVALID_EMAIL",
-  INVALID_PASSWORD = "INVALID_PASSWORD",
-  USER_ALREADY_EXISTS = "USER_ALREADY_EXISTS",
-}
+const EErrorCodes = {
+  INSTANCE_NOT_CONFIGURED: "INSTANCE_NOT_CONFIGURED",
+  ADMIN_ALREADY_EXIST: "ADMIN_ALREADY_EXIST",
+  REQUIRED_EMAIL_PASSWORD_FIRST_NAME: "REQUIRED_EMAIL_PASSWORD_FIRST_NAME",
+  INVALID_EMAIL: "INVALID_EMAIL",
+  INVALID_PASSWORD: "INVALID_PASSWORD",
+  USER_ALREADY_EXISTS: "USER_ALREADY_EXISTS",
+} as const;
+type EErrorCodes = typeof EErrorCodes[keyof typeof EErrorCodes];
 type TError = {
   type: EErrorCodes | undefined;
@@ -144,7 +146,7 @@ export const InstanceSetupForm: FC = (props) => {
 {errorData.type &&
   errorData?.message &&
-  ![EErrorCodes.INVALID_EMAIL, EErrorCodes.INVALID_PASSWORD].includes(errorData.type) && (
+  !([EErrorCodes.INVALID_EMAIL, EErrorCodes.INVALID_PASSWORD] as EErrorCodes[]).includes(errorData.type) && (
     <Banner type="error" message={errorData?.message} />
 )}


@@ -18,13 +18,15 @@ import { AuthBanner } from "../authentication";
 const authService = new AuthService();
 // error codes
-enum EErrorCodes {
-  INSTANCE_NOT_CONFIGURED = "INSTANCE_NOT_CONFIGURED",
-  REQUIRED_EMAIL_PASSWORD = "REQUIRED_EMAIL_PASSWORD",
-  INVALID_EMAIL = "INVALID_EMAIL",
-  USER_DOES_NOT_EXIST = "USER_DOES_NOT_EXIST",
-  AUTHENTICATION_FAILED = "AUTHENTICATION_FAILED",
-}
+const EErrorCodes = {
+  INSTANCE_NOT_CONFIGURED: "INSTANCE_NOT_CONFIGURED",
+  REQUIRED_EMAIL_PASSWORD: "REQUIRED_EMAIL_PASSWORD",
+  INVALID_EMAIL: "INVALID_EMAIL",
+  USER_DOES_NOT_EXIST: "USER_DOES_NOT_EXIST",
+  AUTHENTICATION_FAILED: "AUTHENTICATION_FAILED",
+} as const;
+type EErrorCodes = typeof EErrorCodes[keyof typeof EErrorCodes];
 type TError = {
   type: EErrorCodes | undefined;


@@ -20,13 +20,15 @@ import githubDarkModeImage from "@/public/logos/github-white.png";
 import GitlabLogo from "@/public/logos/gitlab-logo.svg";
 import GoogleLogo from "@/public/logos/google-logo.svg";
-export enum EErrorAlertType {
-  BANNER_ALERT = "BANNER_ALERT",
-  INLINE_FIRST_NAME = "INLINE_FIRST_NAME",
-  INLINE_EMAIL = "INLINE_EMAIL",
-  INLINE_PASSWORD = "INLINE_PASSWORD",
-  INLINE_EMAIL_CODE = "INLINE_EMAIL_CODE",
-}
+export const EErrorAlertType = {
+  BANNER_ALERT: "BANNER_ALERT",
+  INLINE_FIRST_NAME: "INLINE_FIRST_NAME",
+  INLINE_EMAIL: "INLINE_EMAIL",
+  INLINE_PASSWORD: "INLINE_PASSWORD",
+  INLINE_EMAIL_CODE: "INLINE_EMAIL_CODE",
+} as const;
+export type EErrorAlertType = typeof EErrorAlertType[keyof typeof EErrorAlertType];
 const errorCodeMessages: {
   [key in EAdminAuthErrorCodes]: { title: string; message: (email?: string | undefined) => ReactNode };


@@ -2,7 +2,7 @@ import set from "lodash/set";
 import { observable, action, computed, makeObservable, runInAction } from "mobx";
 // plane internal packages
 import { EInstanceStatus, TInstanceStatus } from "@plane/constants";
-import {InstanceService} from "@plane/services";
+import { InstanceService } from "@plane/services";
 import {
   IInstance,
   IInstanceAdmin,


@@ -1 +1 @@
-export * from "ce/components/authentication/authentication-modes";
+export * from "ce/components/authentication/authentication-modes";


@@ -1,7 +1,7 @@
 {
   "name": "admin",
   "description": "Admin UI for Plane",
-  "version": "0.26.0",
+  "version": "0.26.1",
   "license": "AGPL-3.0",
   "private": true,
   "scripts": {
@@ -10,6 +10,7 @@
   "build": "next build",
   "preview": "next build && next start",
   "start": "next start",
+  "format": "prettier --write .",
   "lint": "eslint . --ext .ts,.tsx",
   "lint:errors": "eslint . --ext .ts,.tsx --quiet"
 },
@@ -17,6 +18,7 @@
   "@headlessui/react": "^1.7.19",
   "@plane/constants": "*",
   "@plane/hooks": "*",
+  "@plane/propel": "*",
   "@plane/services": "*",
   "@plane/types": "*",
   "@plane/ui": "*",
@@ -29,7 +31,7 @@
   "lucide-react": "^0.469.0",
   "mobx": "^6.12.0",
   "mobx-react": "^9.1.1",
-  "next": "^14.2.28",
+  "next": "^14.2.29",
   "next-themes": "^0.2.1",
   "postcss": "^8.4.38",
   "react": "^18.3.1",
@@ -48,6 +50,6 @@
   "@types/react-dom": "^18.2.18",
   "@types/uuid": "^9.0.8",
   "@types/zxcvbn": "^4.4.4",
-  "typescript": "5.3.3"
+  "typescript": "5.8.3"
 }
 }


@@ -1,8 +1,2 @@
-module.exports = {
-  plugins: {
-    "postcss-import": {},
-    "tailwindcss/nesting": {},
-    tailwindcss: {},
-    autoprefixer: {},
-  },
-};
+// eslint-disable-next-line @typescript-eslint/no-require-imports
+module.exports = require("@plane/tailwind-config/postcss.config.js");


@@ -1,5 +1,4 @@
-@import url("https://fonts.googleapis.com/css2?family=Inter:wght@200;300;400;500;600;700;800&display=swap");
-@import url("https://fonts.googleapis.com/css2?family=Material+Symbols+Rounded:opsz,wght,FILL,GRAD@48,400,0,0&display=swap");
+@import "@plane/propel/styles/fonts";
 @tailwind base;
 @tailwind components;
@@ -60,23 +59,31 @@
 --color-border-300: 212, 212, 212; /* strong border- 1 */
 --color-border-400: 185, 185, 185; /* strong border- 2 */
---color-shadow-2xs: 0px 0px 1px 0px rgba(23, 23, 23, 0.06), 0px 1px 2px 0px rgba(23, 23, 23, 0.06),
+--color-shadow-2xs:
+  0px 0px 1px 0px rgba(23, 23, 23, 0.06), 0px 1px 2px 0px rgba(23, 23, 23, 0.06),
   0px 1px 2px 0px rgba(23, 23, 23, 0.14);
---color-shadow-xs: 0px 1px 2px 0px rgba(0, 0, 0, 0.16), 0px 2px 4px 0px rgba(16, 24, 40, 0.12),
+--color-shadow-xs:
+  0px 1px 2px 0px rgba(0, 0, 0, 0.16), 0px 2px 4px 0px rgba(16, 24, 40, 0.12),
   0px 1px 8px -1px rgba(16, 24, 40, 0.1);
---color-shadow-sm: 0px 1px 4px 0px rgba(0, 0, 0, 0.01), 0px 4px 8px 0px rgba(0, 0, 0, 0.02),
-  0px 1px 12px 0px rgba(0, 0, 0, 0.12);
---color-shadow-rg: 0px 3px 6px 0px rgba(0, 0, 0, 0.1), 0px 4px 4px 0px rgba(16, 24, 40, 0.08),
+--color-shadow-sm:
+  0px 1px 4px 0px rgba(0, 0, 0, 0.01), 0px 4px 8px 0px rgba(0, 0, 0, 0.02), 0px 1px 12px 0px rgba(0, 0, 0, 0.12);
+--color-shadow-rg:
+  0px 3px 6px 0px rgba(0, 0, 0, 0.1), 0px 4px 4px 0px rgba(16, 24, 40, 0.08),
   0px 1px 12px 0px rgba(16, 24, 40, 0.04);
---color-shadow-md: 0px 4px 8px 0px rgba(0, 0, 0, 0.12), 0px 6px 12px 0px rgba(16, 24, 40, 0.12),
+--color-shadow-md:
+  0px 4px 8px 0px rgba(0, 0, 0, 0.12), 0px 6px 12px 0px rgba(16, 24, 40, 0.12),
   0px 1px 16px 0px rgba(16, 24, 40, 0.12);
---color-shadow-lg: 0px 6px 12px 0px rgba(0, 0, 0, 0.12), 0px 8px 16px 0px rgba(0, 0, 0, 0.12),
+--color-shadow-lg:
+  0px 6px 12px 0px rgba(0, 0, 0, 0.12), 0px 8px 16px 0px rgba(0, 0, 0, 0.12),
   0px 1px 24px 0px rgba(16, 24, 40, 0.12);
---color-shadow-xl: 0px 0px 18px 0px rgba(0, 0, 0, 0.16), 0px 0px 24px 0px rgba(16, 24, 40, 0.16),
+--color-shadow-xl:
+  0px 0px 18px 0px rgba(0, 0, 0, 0.16), 0px 0px 24px 0px rgba(16, 24, 40, 0.16),
   0px 0px 52px 0px rgba(16, 24, 40, 0.16);
---color-shadow-2xl: 0px 8px 16px 0px rgba(0, 0, 0, 0.12), 0px 12px 24px 0px rgba(16, 24, 40, 0.12),
+--color-shadow-2xl:
+  0px 8px 16px 0px rgba(0, 0, 0, 0.12), 0px 12px 24px 0px rgba(16, 24, 40, 0.12),
   0px 1px 32px 0px rgba(16, 24, 40, 0.12);
---color-shadow-3xl: 0px 12px 24px 0px rgba(0, 0, 0, 0.12), 0px 16px 32px 0px rgba(0, 0, 0, 0.12),
+--color-shadow-3xl:
+  0px 12px 24px 0px rgba(0, 0, 0, 0.12), 0px 16px 32px 0px rgba(0, 0, 0, 0.12),
   0px 1px 48px 0px rgba(16, 24, 40, 0.12);
 --color-shadow-4xl: 0px 8px 40px 0px rgba(0, 0, 61, 0.05), 0px 12px 32px -16px rgba(0, 0, 0, 0.05);


@@ -1,13 +1,19 @@
 {
   "extends": "@plane/typescript-config/nextjs.json",
   "compilerOptions": {
-    "plugins": [{ "name": "next" }],
+    "plugins": [
+      {
+        "name": "next"
+      }
+    ],
     "baseUrl": ".",
     "paths": {
       "@/*": ["core/*"],
       "@/public/*": ["public/*"],
-      "@/plane-admin/*": ["ce/*"]
-    }
+      "@/plane-admin/*": ["ce/*"],
+      "@/styles/*": ["styles/*"]
+    },
+    "strictNullChecks": true
   },
   "include": ["next-env.d.ts", "next.config.js", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
   "exclude": ["node_modules"]

apiserver/.coveragerc (new file)

@@ -0,0 +1,25 @@
+[run]
+source = plane
+omit =
+    */tests/*
+    */migrations/*
+    */settings/*
+    */wsgi.py
+    */asgi.py
+    */urls.py
+    manage.py
+    */admin.py
+    */apps.py
+
+[report]
+exclude_lines =
+    pragma: no cover
+    def __repr__
+    if self.debug:
+    raise NotImplementedError
+    if __name__ == .__main__.
+    pass
+    raise ImportError
+
+[html]
+directory = htmlcov


@@ -1,6 +1,6 @@
 {
   "name": "plane-api",
-  "version": "0.26.0",
+  "version": "0.26.1",
   "license": "AGPL-3.0",
   "private": true,
   "description": "API server powering Plane's backend"


@@ -15,4 +15,4 @@ from .state import StateLiteSerializer, StateSerializer
 from .cycle import CycleSerializer, CycleIssueSerializer, CycleLiteSerializer
 from .module import ModuleSerializer, ModuleIssueSerializer, ModuleLiteSerializer
 from .intake import IntakeIssueSerializer
-from .estimate import EstimatePointSerializer
+from .estimate import EstimatePointSerializer


@@ -160,12 +160,15 @@ class IssueSerializer(BaseSerializer):
 else:
     try:
         # Then assign it to default assignee, if it is a valid assignee
-        if default_assignee_id is not None and ProjectMember.objects.filter(
-            member_id=default_assignee_id,
-            project_id=project_id,
-            role__gte=15,
-            is_active=True
-        ).exists():
+        if (
+            default_assignee_id is not None
+            and ProjectMember.objects.filter(
+                member_id=default_assignee_id,
+                project_id=project_id,
+                role__gte=15,
+                is_active=True,
+            ).exists()
+        ):
             IssueAssignee.objects.create(
                 assignee_id=default_assignee_id,
                 issue=issue,


@@ -58,7 +58,7 @@ from plane.bgtasks.storage_metadata_task import get_asset_object_metadata
 from .base import BaseAPIView
 from plane.utils.host import base_host
 from plane.bgtasks.webhook_task import model_activity
+from plane.bgtasks.work_item_link_task import crawl_work_item_link_title
 class WorkspaceIssueAPIEndpoint(BaseAPIView):
     """
@@ -692,6 +692,9 @@ class IssueLinkAPIEndpoint(BaseAPIView):
 serializer = IssueLinkSerializer(data=request.data)
 if serializer.is_valid():
     serializer.save(project_id=project_id, issue_id=issue_id)
+    crawl_work_item_link_title.delay(
+        serializer.data.get("id"), serializer.data.get("url")
+    )
     link = IssueLink.objects.get(pk=serializer.data["id"])
     link.created_by_id = request.data.get("created_by", request.user.id)
@@ -719,6 +722,9 @@
 serializer = IssueLinkSerializer(issue_link, data=request.data, partial=True)
 if serializer.is_valid():
     serializer.save()
+    crawl_work_item_link_title.delay(
+        serializer.data.get("id"), serializer.data.get("url")
+    )
     issue_activity.delay(
         type="link.activity.updated",
         requested_data=requested_data,


@@ -53,6 +53,7 @@ def get_entity_model_and_serializer(entity_type):
     }
     return entity_map.get(entity_type, (None, None))
+
 class UserFavoriteSerializer(serializers.ModelSerializer):
     entity_data = serializers.SerializerMethodField()


@@ -148,10 +148,13 @@
     fields = "__all__"
 class ProjectMemberRoleSerializer(DynamicBaseSerializer):
+    original_role = serializers.IntegerField(source='role', read_only=True)
     class Meta:
         model = ProjectMember
-        fields = ("id", "role", "member", "project")
+        fields = ("id", "role", "member", "project", "original_role", "created_at")
+        read_only_fields = ["original_role", "created_at"]
 class ProjectMemberInviteSerializer(BaseSerializer):

View File

@@ -3,11 +3,22 @@ from rest_framework import serializers
# Module import
from plane.db.models import Account, Profile, User, Workspace, WorkspaceMemberInvite
from plane.utils.url import contains_url
from .base import BaseSerializer
class UserSerializer(BaseSerializer):
def validate_first_name(self, value):
if contains_url(value):
raise serializers.ValidationError("First name cannot contain a URL.")
return value
def validate_last_name(self, value):
if contains_url(value):
raise serializers.ValidationError("Last name cannot contain a URL.")
return value
class Meta:
model = User
# Exclude password field from the serializer
@@ -99,11 +110,16 @@ class UserMeSettingsSerializer(BaseSerializer):
workspace_member__member=obj.id,
workspace_member__is_active=True,
).first()
logo_asset_url = workspace.logo_asset.asset_url if workspace.logo_asset is not None else ""
return {
"last_workspace_id": profile.last_workspace_id,
"last_workspace_slug": (
workspace.slug if workspace is not None else ""
),
"last_workspace_name": (
workspace.name if workspace is not None else ""
),
"last_workspace_logo": (logo_asset_url),
"fallback_workspace_id": profile.last_workspace_id,
"fallback_workspace_slug": (
workspace.slug if workspace is not None else ""

View File

@@ -25,10 +25,12 @@ from plane.db.models import (
WorkspaceUserPreference,
)
from plane.utils.constants import RESTRICTED_WORKSPACE_SLUGS
from plane.utils.url import contains_url
# Django imports
from django.core.validators import URLValidator
from django.core.exceptions import ValidationError
import re
class WorkSpaceSerializer(DynamicBaseSerializer):
@@ -36,10 +38,21 @@ class WorkSpaceSerializer(DynamicBaseSerializer):
logo_url = serializers.CharField(read_only=True)
role = serializers.IntegerField(read_only=True)
def validate_name(self, value):
# Check if the name contains a URL
if contains_url(value):
raise serializers.ValidationError("Name must not contain URLs")
return value
def validate_slug(self, value):
# Check if the slug is restricted
if value in RESTRICTED_WORKSPACE_SLUGS:
raise serializers.ValidationError("Slug is not valid")
# Slug should only contain alphanumeric characters, hyphens, and underscores
if not re.match(r"^[a-zA-Z0-9_-]+$", value):
raise serializers.ValidationError(
"Slug can only contain letters, numbers, hyphens (-), and underscores (_)"
)
return value
class Meta:
@@ -148,7 +161,6 @@ class WorkspaceUserLinkSerializer(BaseSerializer):
return value
def create(self, validated_data):
# Filtering the WorkspaceUserLink with the given url to check if the link already exists.
@@ -157,7 +169,7 @@ class WorkspaceUserLinkSerializer(BaseSerializer):
workspace_user_link = WorkspaceUserLink.objects.filter(
url=url,
workspace_id=validated_data.get("workspace_id"),
owner_id=validated_data.get("owner_id")
owner_id=validated_data.get("owner_id"),
)
if workspace_user_link.exists():
@@ -173,10 +185,8 @@ class WorkspaceUserLinkSerializer(BaseSerializer):
url = validated_data.get("url")
workspace_user_link = WorkspaceUserLink.objects.filter(
url=url,
workspace_id=instance.workspace_id,
owner=instance.owner
)
url=url, workspace_id=instance.workspace_id, owner=instance.owner
)
if workspace_user_link.exclude(pk=instance.id).exists():
raise serializers.ValidationError(
@@ -185,6 +195,7 @@ class WorkspaceUserLinkSerializer(BaseSerializer):
return super().update(instance, validated_data)
class IssueRecentVisitSerializer(serializers.ModelSerializer):
project_identifier = serializers.SerializerMethodField()
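The slug rule added in validate_slug above is a plain character-class check; a quick standalone illustration with hypothetical values, using the same pattern as the serializer:

import re

SLUG_PATTERN = re.compile(r"^[a-zA-Z0-9_-]+$")  # same pattern as validate_slug

for candidate in ("my-workspace", "team_01", "bad slug!", "café"):
    print(candidate, bool(SLUG_PATTERN.match(candidate)))
# my-workspace True, team_01 True, "bad slug!" False, café False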

View File

@@ -6,8 +6,14 @@ from plane.app.views import (
AnalyticViewViewset,
SavedAnalyticEndpoint,
ExportAnalyticsEndpoint,
AdvanceAnalyticsEndpoint,
AdvanceAnalyticsStatsEndpoint,
AdvanceAnalyticsChartEndpoint,
DefaultAnalyticsEndpoint,
ProjectStatsEndpoint,
ProjectAdvanceAnalyticsEndpoint,
ProjectAdvanceAnalyticsStatsEndpoint,
ProjectAdvanceAnalyticsChartEndpoint,
)
@@ -49,4 +55,34 @@ urlpatterns = [
ProjectStatsEndpoint.as_view(),
name="project-analytics",
),
path(
"workspaces/<str:slug>/advance-analytics/",
AdvanceAnalyticsEndpoint.as_view(),
name="advance-analytics",
),
path(
"workspaces/<str:slug>/advance-analytics-stats/",
AdvanceAnalyticsStatsEndpoint.as_view(),
name="advance-analytics-stats",
),
path(
"workspaces/<str:slug>/advance-analytics-charts/",
AdvanceAnalyticsChartEndpoint.as_view(),
name="advance-analytics-chart",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/advance-analytics/",
ProjectAdvanceAnalyticsEndpoint.as_view(),
name="project-advance-analytics",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/advance-analytics-stats/",
ProjectAdvanceAnalyticsStatsEndpoint.as_view(),
name="project-advance-analytics-stats",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/advance-analytics-charts/",
ProjectAdvanceAnalyticsChartEndpoint.as_view(),
name="project-advance-analytics-chart",
),
]
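These urlpatterns expose the advance-analytics views at both workspace and project level. A hedged sketch of calling the workspace overview route, assuming an already-authenticated requests session and a hypothetical host and URL prefix:

import requests

session = requests.Session()  # assumed to carry valid auth cookies or headers
base = "https://app.example.com/api"  # hypothetical deployment host and prefix

resp = session.get(
    f"{base}/workspaces/my-team/advance-analytics/",
    params={"tab": "overview"},
)
resp.raise_for_status()
overview = resp.json()
print(overview["total_work_items"]["count"])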

View File

@@ -12,6 +12,7 @@ from plane.app.views import (
AssetRestoreEndpoint,
ProjectAssetEndpoint,
ProjectBulkAssetEndpoint,
AssetCheckEndpoint,
)
@@ -81,5 +82,11 @@ urlpatterns = [
path(
"assets/v2/workspaces/<str:slug>/projects/<uuid:project_id>/<uuid:entity_id>/bulk/",
ProjectBulkAssetEndpoint.as_view(),
name="bulk-asset-update",
),
path(
"assets/v2/workspaces/<str:slug>/check/<uuid:asset_id>/",
AssetCheckEndpoint.as_view(),
name="asset-check",
),
]

View File

@@ -106,6 +106,7 @@ from .asset.v2 import (
AssetRestoreEndpoint,
ProjectAssetEndpoint,
ProjectBulkAssetEndpoint,
AssetCheckEndpoint,
)
from .issue.base import (
IssueListEndpoint,
@@ -199,6 +200,18 @@ from .analytic.base import (
ProjectStatsEndpoint,
)
from .analytic.advance import (
AdvanceAnalyticsEndpoint,
AdvanceAnalyticsStatsEndpoint,
AdvanceAnalyticsChartEndpoint,
)
from .analytic.project_analytics import (
ProjectAdvanceAnalyticsEndpoint,
ProjectAdvanceAnalyticsStatsEndpoint,
ProjectAdvanceAnalyticsChartEndpoint,
)
from .notification.base import (
NotificationViewSet,
UnreadNotificationEndpoint,

View File

@@ -0,0 +1,366 @@
from rest_framework.response import Response
from rest_framework import status
from typing import Dict, List, Any
from django.db.models import QuerySet, Q, Count
from django.http import HttpRequest
from django.db.models.functions import TruncMonth
from django.utils import timezone
from plane.app.views.base import BaseAPIView
from plane.app.permissions import ROLE, allow_permission
from plane.db.models import (
WorkspaceMember,
Project,
Issue,
Cycle,
Module,
IssueView,
ProjectPage,
Workspace,
CycleIssue,
ModuleIssue,
ProjectMember,
)
from plane.utils.build_chart import build_analytics_chart
from plane.utils.date_utils import (
get_analytics_filters,
)
class AdvanceAnalyticsBaseView(BaseAPIView):
def initialize_workspace(self, slug: str, type: str) -> None:
self._workspace_slug = slug
self.filters = get_analytics_filters(
slug=slug,
type=type,
user=self.request.user,
date_filter=self.request.GET.get("date_filter", None),
project_ids=self.request.GET.get("project_ids", None),
)
class AdvanceAnalyticsEndpoint(AdvanceAnalyticsBaseView):
def get_filtered_counts(self, queryset: QuerySet) -> Dict[str, int]:
def get_filtered_count() -> int:
if self.filters["analytics_date_range"]:
return queryset.filter(
created_at__gte=self.filters["analytics_date_range"]["current"][
"gte"
],
created_at__lte=self.filters["analytics_date_range"]["current"][
"lte"
],
).count()
return queryset.count()
def get_previous_count() -> int:
if self.filters["analytics_date_range"] and self.filters[
"analytics_date_range"
].get("previous"):
return queryset.filter(
created_at__gte=self.filters["analytics_date_range"]["previous"][
"gte"
],
created_at__lte=self.filters["analytics_date_range"]["previous"][
"lte"
],
).count()
return 0
return {
"count": get_filtered_count(),
# "filter_count": get_previous_count(),
}
def get_overview_data(self) -> Dict[str, Dict[str, int]]:
members_query = WorkspaceMember.objects.filter(
workspace__slug=self._workspace_slug, is_active=True
)
if self.request.GET.get("project_ids", None):
project_ids = self.request.GET.get("project_ids", None)
project_ids = [str(project_id) for project_id in project_ids.split(",")]
members_query = ProjectMember.objects.filter(
project_id__in=project_ids, is_active=True
)
return {
"total_users": self.get_filtered_counts(members_query),
"total_admins": self.get_filtered_counts(
members_query.filter(role=ROLE.ADMIN.value)
),
"total_members": self.get_filtered_counts(
members_query.filter(role=ROLE.MEMBER.value)
),
"total_guests": self.get_filtered_counts(
members_query.filter(role=ROLE.GUEST.value)
),
"total_projects": self.get_filtered_counts(
Project.objects.filter(**self.filters["project_filters"])
),
"total_work_items": self.get_filtered_counts(
Issue.issue_objects.filter(**self.filters["base_filters"])
),
"total_cycles": self.get_filtered_counts(
Cycle.objects.filter(**self.filters["base_filters"])
),
"total_intake": self.get_filtered_counts(
Issue.objects.filter(**self.filters["base_filters"]).filter(
issue_intake__status__in=["-2", "0"]
)
),
}
def get_work_items_stats(self) -> Dict[str, Dict[str, int]]:
base_queryset = Issue.issue_objects.filter(**self.filters["base_filters"])
return {
"total_work_items": self.get_filtered_counts(base_queryset),
"started_work_items": self.get_filtered_counts(
base_queryset.filter(state__group="started")
),
"backlog_work_items": self.get_filtered_counts(
base_queryset.filter(state__group="backlog")
),
"un_started_work_items": self.get_filtered_counts(
base_queryset.filter(state__group="unstarted")
),
"completed_work_items": self.get_filtered_counts(
base_queryset.filter(state__group="completed")
),
}
@allow_permission([ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE")
def get(self, request: HttpRequest, slug: str) -> Response:
self.initialize_workspace(slug, type="analytics")
tab = request.GET.get("tab", "overview")
if tab == "overview":
return Response(
self.get_overview_data(),
status=status.HTTP_200_OK,
)
elif tab == "work-items":
return Response(
self.get_work_items_stats(),
status=status.HTTP_200_OK,
)
return Response({"message": "Invalid tab"}, status=status.HTTP_400_BAD_REQUEST)
class AdvanceAnalyticsStatsEndpoint(AdvanceAnalyticsBaseView):
def get_project_issues_stats(self) -> QuerySet:
# Get the base queryset with workspace and project filters
base_queryset = Issue.issue_objects.filter(**self.filters["base_filters"])
# Apply date range filter if available
if self.filters["chart_period_range"]:
start_date, end_date = self.filters["chart_period_range"]
base_queryset = base_queryset.filter(
created_at__date__gte=start_date, created_at__date__lte=end_date
)
return (
base_queryset.values("project_id", "project__name").annotate(
cancelled_work_items=Count("id", filter=Q(state__group="cancelled")),
completed_work_items=Count("id", filter=Q(state__group="completed")),
backlog_work_items=Count("id", filter=Q(state__group="backlog")),
un_started_work_items=Count("id", filter=Q(state__group="unstarted")),
started_work_items=Count("id", filter=Q(state__group="started")),
)
.order_by("project_id")
)
def get_work_items_stats(self) -> Dict[str, Dict[str, int]]:
base_queryset = Issue.issue_objects.filter(**self.filters["base_filters"])
return (
base_queryset
.values("project_id", "project__name")
.annotate(
cancelled_work_items=Count("id", filter=Q(state__group="cancelled")),
completed_work_items=Count("id", filter=Q(state__group="completed")),
backlog_work_items=Count("id", filter=Q(state__group="backlog")),
un_started_work_items=Count("id", filter=Q(state__group="unstarted")),
started_work_items=Count("id", filter=Q(state__group="started")),
)
.order_by("project_id")
)
@allow_permission([ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE")
def get(self, request: HttpRequest, slug: str) -> Response:
self.initialize_workspace(slug, type="chart")
type = request.GET.get("type", "work-items")
if type == "work-items":
return Response(
self.get_work_items_stats(),
status=status.HTTP_200_OK,
)
return Response({"message": "Invalid type"}, status=status.HTTP_400_BAD_REQUEST)
class AdvanceAnalyticsChartEndpoint(AdvanceAnalyticsBaseView):
def project_chart(self) -> List[Dict[str, Any]]:
# Get the base queryset with workspace and project filters
base_queryset = Issue.issue_objects.filter(**self.filters["base_filters"])
date_filter = {}
# Apply date range filter if available
if self.filters["chart_period_range"]:
start_date, end_date = self.filters["chart_period_range"]
date_filter = {
"created_at__date__gte": start_date,
"created_at__date__lte": end_date,
}
total_work_items = base_queryset.filter(**date_filter).count()
total_cycles = Cycle.objects.filter(
**self.filters["base_filters"], **date_filter
).count()
total_modules = Module.objects.filter(
**self.filters["base_filters"], **date_filter
).count()
total_intake = Issue.objects.filter(
issue_intake__isnull=False, **self.filters["base_filters"], **date_filter
).count()
total_members = WorkspaceMember.objects.filter(
workspace__slug=self._workspace_slug, is_active=True, **date_filter
).count()
total_pages = ProjectPage.objects.filter(
**self.filters["base_filters"], **date_filter
).count()
total_views = IssueView.objects.filter(
**self.filters["base_filters"], **date_filter
).count()
data = {
"work_items": total_work_items,
"cycles": total_cycles,
"modules": total_modules,
"intake": total_intake,
"members": total_members,
"pages": total_pages,
"views": total_views,
}
return [
{
"key": key,
"name": key.replace("_", " ").title(),
"count": value or 0,
}
for key, value in data.items()
]
def work_item_completion_chart(self) -> Dict[str, Any]:
# Get the base queryset
queryset = (
Issue.issue_objects.filter(**self.filters["base_filters"])
.select_related("workspace", "state", "parent")
.prefetch_related(
"assignees", "labels", "issue_module__module", "issue_cycle__cycle"
)
)
workspace = Workspace.objects.get(slug=self._workspace_slug)
start_date = workspace.created_at.date().replace(day=1)
# Apply date range filter if available
if self.filters["chart_period_range"]:
start_date, end_date = self.filters["chart_period_range"]
queryset = queryset.filter(
created_at__date__gte=start_date, created_at__date__lte=end_date
)
# Annotate by month and count
monthly_stats = (
queryset.annotate(month=TruncMonth("created_at"))
.values("month")
.annotate(
created_count=Count("id"),
completed_count=Count("id", filter=Q(state__group="completed")),
)
.order_by("month")
)
# Create dictionary of month -> counts
stats_dict = {
stat["month"].strftime("%Y-%m-%d"): {
"created_count": stat["created_count"],
"completed_count": stat["completed_count"],
}
for stat in monthly_stats
}
# Generate monthly data (ensure months with 0 count are included)
data = []
# include the current date at the end
end_date = timezone.now().date()
last_month = end_date.replace(day=1)
current_month = start_date
while current_month <= last_month:
date_str = current_month.strftime("%Y-%m-%d")
stats = stats_dict.get(date_str, {"created_count": 0, "completed_count": 0})
data.append(
{
"key": date_str,
"name": date_str,
"count": stats["created_count"],
"completed_issues": stats["completed_count"],
"created_issues": stats["created_count"],
}
)
# Move to next month
if current_month.month == 12:
current_month = current_month.replace(
year=current_month.year + 1, month=1
)
else:
current_month = current_month.replace(month=current_month.month + 1)
schema = {
"completed_issues": "completed_issues",
"created_issues": "created_issues",
}
return {"data": data, "schema": schema}
@allow_permission([ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE")
def get(self, request: HttpRequest, slug: str) -> Response:
self.initialize_workspace(slug, type="chart")
type = request.GET.get("type", "projects")
group_by = request.GET.get("group_by", None)
x_axis = request.GET.get("x_axis", "PRIORITY")
if type == "projects":
return Response(self.project_chart(), status=status.HTTP_200_OK)
elif type == "custom-work-items":
queryset = (
Issue.issue_objects.filter(**self.filters["base_filters"])
.select_related("workspace", "state", "parent")
.prefetch_related(
"assignees", "labels", "issue_module__module", "issue_cycle__cycle"
)
)
# Apply date range filter if available
if self.filters["chart_period_range"]:
start_date, end_date = self.filters["chart_period_range"]
queryset = queryset.filter(
created_at__date__gte=start_date, created_at__date__lte=end_date
)
return Response(
build_analytics_chart(queryset, x_axis, group_by),
status=status.HTTP_200_OK,
)
elif type == "work-items":
return Response(
self.work_item_completion_chart(),
status=status.HTTP_200_OK,
)
return Response({"message": "Invalid type"}, status=status.HTTP_400_BAD_REQUEST)

View File

@@ -0,0 +1,421 @@
from rest_framework.response import Response
from rest_framework import status
from typing import Dict, Any
from django.db.models import QuerySet, Q, Count
from django.http import HttpRequest
from django.db.models.functions import TruncMonth
from django.utils import timezone
from datetime import timedelta
from plane.app.views.base import BaseAPIView
from plane.app.permissions import ROLE, allow_permission
from plane.db.models import (
Project,
Issue,
Cycle,
Module,
CycleIssue,
ModuleIssue,
)
from django.db import models
from django.db.models import F, Case, When, Value
from django.db.models.functions import Concat
from plane.utils.build_chart import build_analytics_chart
from plane.utils.date_utils import (
get_analytics_filters,
)
class ProjectAdvanceAnalyticsBaseView(BaseAPIView):
def initialize_workspace(self, slug: str, type: str) -> None:
self._workspace_slug = slug
self.filters = get_analytics_filters(
slug=slug,
type=type,
user=self.request.user,
date_filter=self.request.GET.get("date_filter", None),
project_ids=self.request.GET.get("project_ids", None),
)
class ProjectAdvanceAnalyticsEndpoint(ProjectAdvanceAnalyticsBaseView):
def get_filtered_counts(self, queryset: QuerySet) -> Dict[str, int]:
def get_filtered_count() -> int:
if self.filters["analytics_date_range"]:
return queryset.filter(
created_at__gte=self.filters["analytics_date_range"]["current"][
"gte"
],
created_at__lte=self.filters["analytics_date_range"]["current"][
"lte"
],
).count()
return queryset.count()
return {
"count": get_filtered_count(),
}
def get_work_items_stats(
self, project_id, cycle_id=None, module_id=None
) -> Dict[str, Dict[str, int]]:
"""
Returns work item stats for the workspace, or filtered by cycle_id or module_id if provided.
"""
base_queryset = None
if cycle_id is not None:
cycle_issues = CycleIssue.objects.filter(
**self.filters["base_filters"], cycle_id=cycle_id
).values_list("issue_id", flat=True)
base_queryset = Issue.issue_objects.filter(id__in=cycle_issues)
elif module_id is not None:
module_issues = ModuleIssue.objects.filter(
**self.filters["base_filters"], module_id=module_id
).values_list("issue_id", flat=True)
base_queryset = Issue.issue_objects.filter(id__in=module_issues)
else:
base_queryset = Issue.issue_objects.filter(
**self.filters["base_filters"], project_id=project_id
)
return {
"total_work_items": self.get_filtered_counts(base_queryset),
"started_work_items": self.get_filtered_counts(
base_queryset.filter(state__group="started")
),
"backlog_work_items": self.get_filtered_counts(
base_queryset.filter(state__group="backlog")
),
"un_started_work_items": self.get_filtered_counts(
base_queryset.filter(state__group="unstarted")
),
"completed_work_items": self.get_filtered_counts(
base_queryset.filter(state__group="completed")
),
}
@allow_permission([ROLE.ADMIN, ROLE.MEMBER])
def get(self, request: HttpRequest, slug: str, project_id: str) -> Response:
self.initialize_workspace(slug, type="analytics")
# Optionally accept cycle_id or module_id as query params
cycle_id = request.GET.get("cycle_id", None)
module_id = request.GET.get("module_id", None)
return Response(
self.get_work_items_stats(
cycle_id=cycle_id, module_id=module_id, project_id=project_id
),
status=status.HTTP_200_OK,
)
class ProjectAdvanceAnalyticsStatsEndpoint(ProjectAdvanceAnalyticsBaseView):
def get_project_issues_stats(self) -> QuerySet:
# Get the base queryset with workspace and project filters
base_queryset = Issue.issue_objects.filter(**self.filters["base_filters"])
# Apply date range filter if available
if self.filters["chart_period_range"]:
start_date, end_date = self.filters["chart_period_range"]
base_queryset = base_queryset.filter(
created_at__date__gte=start_date, created_at__date__lte=end_date
)
return (
base_queryset.values("project_id", "project__name")
.annotate(
cancelled_work_items=Count("id", filter=Q(state__group="cancelled")),
completed_work_items=Count("id", filter=Q(state__group="completed")),
backlog_work_items=Count("id", filter=Q(state__group="backlog")),
un_started_work_items=Count("id", filter=Q(state__group="unstarted")),
started_work_items=Count("id", filter=Q(state__group="started")),
)
.order_by("project_id")
)
def get_work_items_stats(
self, project_id, cycle_id=None, module_id=None
) -> Dict[str, Dict[str, int]]:
base_queryset = None
if cycle_id is not None:
cycle_issues = CycleIssue.objects.filter(
**self.filters["base_filters"], cycle_id=cycle_id
).values_list("issue_id", flat=True)
base_queryset = Issue.issue_objects.filter(id__in=cycle_issues)
elif module_id is not None:
module_issues = ModuleIssue.objects.filter(
**self.filters["base_filters"], module_id=module_id
).values_list("issue_id", flat=True)
base_queryset = Issue.issue_objects.filter(id__in=module_issues)
else:
base_queryset = Issue.issue_objects.filter(
**self.filters["base_filters"], project_id=project_id
)
return (
base_queryset.annotate(display_name=F("assignees__display_name"))
.annotate(assignee_id=F("assignees__id"))
.annotate(avatar=F("assignees__avatar"))
.annotate(
avatar_url=Case(
# If `avatar_asset` exists, use it to generate the asset URL
When(
assignees__avatar_asset__isnull=False,
then=Concat(
Value("/api/assets/v2/static/"),
"assignees__avatar_asset", # Assuming avatar_asset has an id or relevant field
Value("/"),
),
),
# If `avatar_asset` is None, fall back to using `avatar` field directly
When(
assignees__avatar_asset__isnull=True, then="assignees__avatar"
),
default=Value(None),
output_field=models.CharField(),
)
)
.values("display_name", "assignee_id", "avatar_url")
.annotate(
cancelled_work_items=Count(
"id", filter=Q(state__group="cancelled"), distinct=True
),
completed_work_items=Count(
"id", filter=Q(state__group="completed"), distinct=True
),
backlog_work_items=Count(
"id", filter=Q(state__group="backlog"), distinct=True
),
un_started_work_items=Count(
"id", filter=Q(state__group="unstarted"), distinct=True
),
started_work_items=Count(
"id", filter=Q(state__group="started"), distinct=True
),
)
.order_by("display_name")
)
@allow_permission([ROLE.ADMIN, ROLE.MEMBER])
def get(self, request: HttpRequest, slug: str, project_id: str) -> Response:
self.initialize_workspace(slug, type="chart")
type = request.GET.get("type", "work-items")
if type == "work-items":
# Optionally accept cycle_id or module_id as query params
cycle_id = request.GET.get("cycle_id", None)
module_id = request.GET.get("module_id", None)
return Response(
self.get_work_items_stats(
project_id=project_id, cycle_id=cycle_id, module_id=module_id
),
status=status.HTTP_200_OK,
)
return Response({"message": "Invalid type"}, status=status.HTTP_400_BAD_REQUEST)
class ProjectAdvanceAnalyticsChartEndpoint(ProjectAdvanceAnalyticsBaseView):
def work_item_completion_chart(
self, project_id, cycle_id=None, module_id=None
) -> Dict[str, Any]:
# Get the base queryset
queryset = (
Issue.issue_objects.filter(**self.filters["base_filters"])
.filter(project_id=project_id)
.select_related("workspace", "state", "parent")
.prefetch_related(
"assignees", "labels", "issue_module__module", "issue_cycle__cycle"
)
)
if cycle_id is not None:
cycle_issues = CycleIssue.objects.filter(
**self.filters["base_filters"], cycle_id=cycle_id
).values_list("issue_id", flat=True)
cycle = Cycle.objects.filter(id=cycle_id).first()
if cycle and cycle.start_date:
start_date = cycle.start_date.date()
end_date = cycle.end_date.date()
else:
return {"data": [], "schema": {}}
queryset = cycle_issues
elif module_id is not None:
module_issues = ModuleIssue.objects.filter(
**self.filters["base_filters"], module_id=module_id
).values_list("issue_id", flat=True)
module = Module.objects.filter(id=module_id).first()
if module and module.start_date:
start_date = module.start_date
end_date = module.target_date
else:
return {"data": [], "schema": {}}
queryset = module_issues
else:
project = Project.objects.filter(id=project_id).first()
if project.created_at:
start_date = project.created_at.date().replace(day=1)
else:
return {"data": [], "schema": {}}
if cycle_id or module_id:
# Get daily stats with optimized query
daily_stats = (
queryset.values("created_at__date")
.annotate(
created_count=Count("id"),
completed_count=Count(
"id", filter=Q(issue__state__group="completed")
),
)
.order_by("created_at__date")
)
# Create a dictionary of existing stats with summed counts
stats_dict = {
stat["created_at__date"].strftime("%Y-%m-%d"): {
"created_count": stat["created_count"],
"completed_count": stat["completed_count"],
}
for stat in daily_stats
}
# Generate data for all days in the range
data = []
current_date = start_date
while current_date <= end_date:
date_str = current_date.strftime("%Y-%m-%d")
stats = stats_dict.get(
date_str, {"created_count": 0, "completed_count": 0}
)
data.append(
{
"key": date_str,
"name": date_str,
"count": stats["created_count"] + stats["completed_count"],
"completed_issues": stats["completed_count"],
"created_issues": stats["created_count"],
}
)
current_date += timedelta(days=1)
else:
# Apply date range filter if available
if self.filters["chart_period_range"]:
start_date, end_date = self.filters["chart_period_range"]
queryset = queryset.filter(
created_at__date__gte=start_date, created_at__date__lte=end_date
)
# Annotate by month and count
monthly_stats = (
queryset.annotate(month=TruncMonth("created_at"))
.values("month")
.annotate(
created_count=Count("id"),
completed_count=Count("id", filter=Q(state__group="completed")),
)
.order_by("month")
)
# Create dictionary of month -> counts
stats_dict = {
stat["month"].strftime("%Y-%m-%d"): {
"created_count": stat["created_count"],
"completed_count": stat["completed_count"],
}
for stat in monthly_stats
}
# Generate monthly data (ensure months with 0 count are included)
data = []
# include the current date at the end
end_date = timezone.now().date()
last_month = end_date.replace(day=1)
current_month = start_date
while current_month <= last_month:
date_str = current_month.strftime("%Y-%m-%d")
stats = stats_dict.get(
date_str, {"created_count": 0, "completed_count": 0}
)
data.append(
{
"key": date_str,
"name": date_str,
"count": stats["created_count"],
"completed_issues": stats["completed_count"],
"created_issues": stats["created_count"],
}
)
# Move to next month
if current_month.month == 12:
current_month = current_month.replace(
year=current_month.year + 1, month=1
)
else:
current_month = current_month.replace(month=current_month.month + 1)
schema = {
"completed_issues": "completed_issues",
"created_issues": "created_issues",
}
return {"data": data, "schema": schema}
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
def get(self, request: HttpRequest, slug: str, project_id: str) -> Response:
self.initialize_workspace(slug, type="chart")
type = request.GET.get("type", "projects")
group_by = request.GET.get("group_by", None)
x_axis = request.GET.get("x_axis", "PRIORITY")
cycle_id = request.GET.get("cycle_id", None)
module_id = request.GET.get("module_id", None)
if type == "custom-work-items":
queryset = (
Issue.issue_objects.filter(**self.filters["base_filters"])
.filter(project_id=project_id)
.select_related("workspace", "state", "parent")
.prefetch_related(
"assignees", "labels", "issue_module__module", "issue_cycle__cycle"
)
)
# Apply cycle/module filters if present
if cycle_id is not None:
cycle_issues = CycleIssue.objects.filter(
**self.filters["base_filters"], cycle_id=cycle_id
).values_list("issue_id", flat=True)
queryset = queryset.filter(id__in=cycle_issues)
elif module_id is not None:
module_issues = ModuleIssue.objects.filter(
**self.filters["base_filters"], module_id=module_id
).values_list("issue_id", flat=True)
queryset = queryset.filter(id__in=module_issues)
# Apply date range filter if available
if self.filters["chart_period_range"]:
start_date, end_date = self.filters["chart_period_range"]
queryset = queryset.filter(
created_at__date__gte=start_date, created_at__date__lte=end_date
)
return Response(
build_analytics_chart(queryset, x_axis, group_by),
status=status.HTTP_200_OK,
)
elif type == "work-items":
# Optionally accept cycle_id or module_id as query params
cycle_id = request.GET.get("cycle_id", None)
module_id = request.GET.get("module_id", None)
return Response(
self.work_item_completion_chart(
project_id=project_id, cycle_id=cycle_id, module_id=module_id
),
status=status.HTTP_200_OK,
)
return Response({"message": "Invalid type"}, status=status.HTTP_400_BAD_REQUEST)

View File

@@ -707,3 +707,14 @@ class ProjectBulkAssetEndpoint(BaseAPIView):
pass
return Response(status=status.HTTP_204_NO_CONTENT)
class AssetCheckEndpoint(BaseAPIView):
"""Endpoint to check if an asset exists."""
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST], level="WORKSPACE")
def get(self, request, slug, asset_id):
asset = FileAsset.all_objects.filter(
id=asset_id, workspace__slug=slug, deleted_at__isnull=True
).exists()
return Response({"exists": asset}, status=status.HTTP_200_OK)

View File

@@ -1119,14 +1119,13 @@ class CycleUserPropertiesEndpoint(BaseAPIView):
class CycleProgressEndpoint(BaseAPIView):
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
def get(self, request, slug, project_id, cycle_id):
cycle = Cycle.objects.filter(
workspace__slug=slug, project_id=project_id, id=cycle_id
).first()
if not cycle:
return Response(
{"error": "Cycle not found"}, status=status.HTTP_404_NOT_FOUND
)
)
aggregate_estimates = (
Issue.issue_objects.filter(
estimate_point__estimate__type="points",
@@ -1177,7 +1176,7 @@ class CycleProgressEndpoint(BaseAPIView):
),
)
)
if cycle.progress_snapshot:
if cycle.progress_snapshot:
backlog_issues = cycle.progress_snapshot.get("backlog_issues", 0)
unstarted_issues = cycle.progress_snapshot.get("unstarted_issues", 0)
started_issues = cycle.progress_snapshot.get("started_issues", 0)

View File

@@ -29,6 +29,7 @@ from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPagina
from plane.app.permissions import allow_permission, ROLE
from plane.utils.host import base_host
class CycleIssueViewSet(BaseViewSet):
serializer_class = CycleIssueSerializer
model = CycleIssue

View File

@@ -11,8 +11,7 @@ from rest_framework.response import Response
# Module import
from plane.app.permissions import ROLE, allow_permission
from plane.app.serializers import (ProjectLiteSerializer,
WorkspaceLiteSerializer)
from plane.app.serializers import ProjectLiteSerializer, WorkspaceLiteSerializer
from plane.db.models import Project, Workspace
from plane.license.utils.instance_value import get_configuration_value
from plane.utils.exception_logger import log_exception
@@ -22,6 +21,7 @@ from ..base import BaseAPIView
class LLMProvider:
"""Base class for LLM provider configurations"""
name: str = ""
models: List[str] = []
default_model: str = ""
@@ -34,11 +34,13 @@ class LLMProvider:
"default_model": cls.default_model,
}
class OpenAIProvider(LLMProvider):
name = "OpenAI"
models = ["gpt-3.5-turbo", "gpt-4o-mini", "gpt-4o", "o1-mini", "o1-preview"]
default_model = "gpt-4o-mini"
class AnthropicProvider(LLMProvider):
name = "Anthropic"
models = [
@@ -49,40 +51,45 @@ class AnthropicProvider(LLMProvider):
"claude-2.1",
"claude-2",
"claude-instant-1.2",
"claude-instant-1"
"claude-instant-1",
]
default_model = "claude-3-sonnet-20240229"
class GeminiProvider(LLMProvider):
name = "Gemini"
models = ["gemini-pro", "gemini-1.5-pro-latest", "gemini-pro-vision"]
default_model = "gemini-pro"
SUPPORTED_PROVIDERS = {
"openai": OpenAIProvider,
"anthropic": AnthropicProvider,
"gemini": GeminiProvider,
}
def get_llm_config() -> Tuple[str | None, str | None, str | None]:
"""
Helper to get LLM configuration values, returns:
- api_key, model, provider
"""
api_key, provider_key, model = get_configuration_value([
{
"key": "LLM_API_KEY",
"default": os.environ.get("LLM_API_KEY", None),
},
{
"key": "LLM_PROVIDER",
"default": os.environ.get("LLM_PROVIDER", "openai"),
},
{
"key": "LLM_MODEL",
"default": os.environ.get("LLM_MODEL", None),
},
])
api_key, provider_key, model = get_configuration_value(
[
{
"key": "LLM_API_KEY",
"default": os.environ.get("LLM_API_KEY", None),
},
{
"key": "LLM_PROVIDER",
"default": os.environ.get("LLM_PROVIDER", "openai"),
},
{
"key": "LLM_MODEL",
"default": os.environ.get("LLM_MODEL", None),
},
]
)
provider = SUPPORTED_PROVIDERS.get(provider_key.lower())
if not provider:
@@ -99,16 +106,20 @@ def get_llm_config() -> Tuple[str | None, str | None, str | None]:
# Validate model is supported by provider
if model not in provider.models:
log_exception(ValueError(
f"Model {model} not supported by {provider.name}. "
f"Supported models: {', '.join(provider.models)}"
))
log_exception(
ValueError(
f"Model {model} not supported by {provider.name}. "
f"Supported models: {', '.join(provider.models)}"
)
)
return None, None, None
return api_key, model, provider_key
def get_llm_response(task, prompt, api_key: str, model: str, provider: str) -> Tuple[str | None, str | None]:
def get_llm_response(
task, prompt, api_key: str, model: str, provider: str
) -> Tuple[str | None, str | None]:
"""Helper to get LLM completion response"""
final_text = task + "\n" + prompt
try:
@@ -118,10 +129,7 @@ def get_llm_response(task, prompt, api_key: str, model: str, provider: str) -> T
client = OpenAI(api_key=api_key)
chat_completion = client.chat.completions.create(
model=model,
messages=[
{"role": "user", "content": final_text}
]
model=model, messages=[{"role": "user", "content": final_text}]
)
text = chat_completion.choices[0].message.content
return text, None
@@ -135,6 +143,7 @@ def get_llm_response(task, prompt, api_key: str, model: str, provider: str) -> T
else:
return None, f"Error occurred while generating response from {provider}"
class GPTIntegrationEndpoint(BaseAPIView):
@allow_permission([ROLE.ADMIN, ROLE.MEMBER])
def post(self, request, slug, project_id):
@@ -152,7 +161,9 @@ class GPTIntegrationEndpoint(BaseAPIView):
{"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST
)
text, error = get_llm_response(task, request.data.get("prompt", False), api_key, model, provider)
text, error = get_llm_response(
task, request.data.get("prompt", False), api_key, model, provider
)
if not text and error:
return Response(
{"error": "An internal error has occurred."},
@@ -190,7 +201,9 @@ class WorkspaceGPTIntegrationEndpoint(BaseAPIView):
{"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST
)
text, error = get_llm_response(task, request.data.get("prompt", False), api_key, model, provider)
text, error = get_llm_response(
task, request.data.get("prompt", False), api_key, model, provider
)
if not text and error:
return Response(
{"error": "An internal error has occurred."},

View File

@@ -38,6 +38,7 @@ from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPagina
from plane.app.permissions import allow_permission, ROLE
from plane.utils.error_codes import ERROR_CODES
from plane.utils.host import base_host
# Module imports
from .. import BaseViewSet, BaseAPIView

View File

@@ -23,6 +23,7 @@ from plane.settings.storage import S3Storage
from plane.bgtasks.storage_metadata_task import get_asset_object_metadata
from plane.utils.host import base_host
class IssueAttachmentEndpoint(BaseAPIView):
serializer_class = IssueAttachmentSerializer
model = FileAsset

View File

@@ -19,6 +19,7 @@ from plane.db.models import IssueComment, ProjectMember, CommentReaction, Projec
from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.host import base_host
class IssueCommentViewSet(BaseViewSet):
serializer_class = IssueCommentSerializer
model = IssueComment

View File

@@ -15,8 +15,10 @@ from plane.app.serializers import IssueLinkSerializer
from plane.app.permissions import ProjectEntityPermission
from plane.db.models import IssueLink
from plane.bgtasks.issue_activities_task import issue_activity
from plane.bgtasks.work_item_link_task import crawl_work_item_link_title
from plane.utils.host import base_host
class IssueLinkViewSet(BaseViewSet):
permission_classes = [ProjectEntityPermission]
@@ -43,6 +45,9 @@ class IssueLinkViewSet(BaseViewSet):
serializer = IssueLinkSerializer(data=request.data)
if serializer.is_valid():
serializer.save(project_id=project_id, issue_id=issue_id)
crawl_work_item_link_title.delay(
serializer.data.get("id"), serializer.data.get("url")
)
issue_activity.delay(
type="link.activity.created",
requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
@@ -54,6 +59,10 @@ class IssueLinkViewSet(BaseViewSet):
notification=True,
origin=base_host(request=request, is_app=True),
)
issue_link = self.get_queryset().get(id=serializer.data.get("id"))
serializer = IssueLinkSerializer(issue_link)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -65,9 +74,14 @@ class IssueLinkViewSet(BaseViewSet):
current_instance = json.dumps(
IssueLinkSerializer(issue_link).data, cls=DjangoJSONEncoder
)
serializer = IssueLinkSerializer(issue_link, data=request.data, partial=True)
if serializer.is_valid():
serializer.save()
crawl_work_item_link_title.delay(
serializer.data.get("id"), serializer.data.get("url")
)
issue_activity.delay(
type="link.activity.updated",
requested_data=requested_data,
@@ -79,6 +93,9 @@ class IssueLinkViewSet(BaseViewSet):
notification=True,
origin=base_host(request=request, is_app=True),
)
issue_link = self.get_queryset().get(id=serializer.data.get("id"))
serializer = IssueLinkSerializer(issue_link)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
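This diff wires crawl_work_item_link_title in after both create and update, so link titles are resolved asynchronously. The task body is not shown in this changeset; the following is a purely illustrative stand-in for what such a title crawler might look like, assuming Celery and requests, and the shipped task may differ:

import re
import requests
from celery import shared_task

@shared_task
def crawl_link_title(link_id, url):  # hypothetical stand-in, not the shipped task
    try:
        html = requests.get(url, timeout=5).text
        match = re.search(r"<title[^>]*>(.*?)</title>", html, re.S | re.I)
        title = match.group(1).strip() if match else url
    except requests.RequestException:
        title = url  # fall back to the raw URL on fetch failure
    # ...persist `title` onto the IssueLink row identified by link_id...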

View File

@@ -17,6 +17,7 @@ from plane.db.models import IssueReaction
from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.host import base_host
class IssueReactionViewSet(BaseViewSet):
serializer_class = IssueReactionSerializer
model = IssueReaction

View File

@@ -29,6 +29,7 @@ from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.issue_relation_mapper import get_actual_relation
from plane.utils.host import base_host
class IssueRelationViewSet(BaseViewSet):
serializer_class = IssueRelationSerializer
model = IssueRelation

View File

@@ -25,6 +25,7 @@ from collections import defaultdict
from plane.utils.host import base_host
from plane.utils.order_queryset import order_issue_queryset
class SubIssuesEndpoint(BaseAPIView):
permission_classes = [ProjectEntityPermission]

View File

@@ -63,6 +63,7 @@ from .. import BaseAPIView, BaseViewSet
from plane.bgtasks.recent_visited_task import recent_visited_task
from plane.utils.host import base_host
class ModuleViewSet(BaseViewSet):
model = Module
webhook_event = "module"

View File

@@ -36,6 +36,7 @@ from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPagina
from .. import BaseViewSet
from plane.utils.host import base_host
class ModuleIssueViewSet(BaseViewSet):
serializer_class = ModuleIssueSerializer
model = ModuleIssue
@@ -280,7 +281,11 @@ class ModuleIssueViewSet(BaseViewSet):
issue_id=str(issue_id),
project_id=str(project_id),
current_instance=json.dumps(
{"module_name": module_issue.first().module.name if (module_issue.first() and module_issue.first().module) else None}
{
"module_name": module_issue.first().module.name
if (module_issue.first() and module_issue.first().module)
else None
}
),
epoch=int(timezone.now().timestamp()),
notification=True,

View File

@@ -42,6 +42,7 @@ from plane.bgtasks.page_version_task import page_version
from plane.bgtasks.recent_visited_task import recent_visited_task
from plane.bgtasks.copy_s3_object import copy_s3_objects
def unarchive_archive_page_and_descendants(page_id, archived_at):
# Your SQL query
sql = """
@@ -198,7 +199,7 @@ class PageViewSet(BaseViewSet):
project = Project.objects.get(pk=project_id)
"""
if the role is guest and guest_view_all_features is false and owned by is not
if the role is guest and guest_view_all_features is false and owned by is not
the requesting user then dont show the page
"""
@@ -572,6 +573,12 @@ class PageDuplicateEndpoint(BaseAPIView):
pk=page_id, workspace__slug=slug, projects__id=project_id
).first()
# check for permission
if page.access == Page.PRIVATE_ACCESS and page.owned_by_id != request.user.id:
return Response(
{"error": "Permission denied"}, status=status.HTTP_403_FORBIDDEN
)
# get all the project ids where page is present
project_ids = ProjectPage.objects.filter(page_id=page_id).values_list(
"project_id", flat=True

View File

@@ -445,7 +445,7 @@ class ProjectViewSet(BaseViewSet):
is_active=True,
).exists()
):
project = Project.objects.get(pk=pk)
project = Project.objects.get(pk=pk, workspace__slug=slug)
project.delete()
webhook_activity.delay(
event="project",

View File

@@ -29,6 +29,7 @@ from plane.db.models import (
from plane.db.models.project import ProjectNetwork
from plane.utils.host import base_host
class ProjectInvitationsViewset(BaseViewSet):
serializer_class = ProjectMemberInviteSerializer
model = ProjectMemberInvite

View File

@@ -168,6 +168,8 @@ class ProjectMemberViewSet(BaseViewSet):
workspace__slug=slug,
member__is_bot=False,
is_active=True,
member__member_workspace__workspace__slug=slug,
member__member_workspace__is_active=True,
).select_related("project", "member", "workspace")
serializer = ProjectMemberRoleSerializer(
@@ -313,7 +315,11 @@ class UserProjectRolesEndpoint(BaseAPIView):
def get(self, request, slug):
project_members = ProjectMember.objects.filter(
workspace__slug=slug, member_id=request.user.id, is_active=True
workspace__slug=slug,
member_id=request.user.id,
is_active=True,
member__member_workspace__workspace__slug=slug,
member__member_workspace__is_active=True,
).values("project_id", "role")
project_members = {

View File

@@ -1,5 +1,5 @@
# Django imports
from django.db.models import Q
from django.db.models import Q, QuerySet
# Third party imports
from rest_framework import status
@@ -12,6 +12,95 @@ from plane.utils.issue_search import search_issues
class IssueSearchEndpoint(BaseAPIView):
def filter_issues_by_project(self, project_id: int, issues: QuerySet) -> QuerySet:
"""
Filter issues by project
"""
issues = issues.filter(project_id=project_id)
return issues
def search_issues_by_query(self, query: str, issues: QuerySet) -> QuerySet:
"""
Search issues by query
"""
issues = search_issues(query, issues)
return issues
def search_issues_and_excluding_parent(
self, issues: QuerySet, issue_id: str
) -> QuerySet:
"""
Search issues and epics by query excluding the parent
"""
issue = Issue.issue_objects.filter(pk=issue_id).first()
if issue:
issues = issues.filter(
~Q(pk=issue_id), ~Q(pk=issue.parent_id), ~Q(parent_id=issue_id)
)
return issues
def filter_issues_excluding_related_issues(
self, issue_id: str, issues: QuerySet
) -> QuerySet:
"""
Filter issues excluding related issues
"""
issue = Issue.issue_objects.filter(pk=issue_id).first()
related_issue_ids = (
IssueRelation.objects.filter(Q(related_issue=issue) | Q(issue=issue))
.values_list("issue_id", "related_issue_id")
.distinct()
)
related_issue_ids = [item for sublist in related_issue_ids for item in sublist]
if issue:
issues = issues.filter(~Q(pk=issue_id), ~Q(pk__in=related_issue_ids))
return issues
def filter_root_issues_only(self, issue_id: str, issues: QuerySet) -> QuerySet:
"""
Filter root issues only
"""
issue = Issue.issue_objects.filter(pk=issue_id).first()
if issue:
issues = issues.filter(~Q(pk=issue_id), parent__isnull=True)
if issue.parent:
issues = issues.filter(~Q(pk=issue.parent_id))
return issues
def exclude_issues_in_cycles(self, issues: QuerySet) -> QuerySet:
"""
Exclude issues in cycles
"""
issues = issues.exclude(
Q(issue_cycle__isnull=False) & Q(issue_cycle__deleted_at__isnull=True)
)
return issues
def exclude_issues_in_module(self, issues: QuerySet, module: str) -> QuerySet:
"""
Exclude issues in a module
"""
issues = issues.exclude(
Q(issue_module__module=module) & Q(issue_module__deleted_at__isnull=True)
)
return issues
def filter_issues_without_target_date(self, issues: QuerySet) -> QuerySet:
"""
Filter issues without a target date
"""
issues = issues.filter(target_date__isnull=True)
return issues
def get(self, request, slug, project_id):
query = request.query_params.get("search", False)
workspace_search = request.query_params.get("workspace_search", "false")
@@ -21,7 +110,6 @@ class IssueSearchEndpoint(BaseAPIView):
module = request.query_params.get("module", False)
sub_issue = request.query_params.get("sub_issue", "false")
target_date = request.query_params.get("target_date", True)
issue_id = request.query_params.get("issue_id", False)
issues = Issue.issue_objects.filter(
@@ -32,52 +120,28 @@ class IssueSearchEndpoint(BaseAPIView):
)
if workspace_search == "false":
issues = issues.filter(project_id=project_id)
issues = self.filter_issues_by_project(project_id, issues)
if query:
issues = search_issues(query, issues)
issues = self.search_issues_by_query(query, issues)
if parent == "true" and issue_id:
issue = Issue.issue_objects.filter(pk=issue_id).first()
if issue:
issues = issues.filter(
~Q(pk=issue_id), ~Q(pk=issue.parent_id), ~Q(parent_id=issue_id)
)
issues = self.search_issues_and_excluding_parent(issues, issue_id)
if issue_relation == "true" and issue_id:
issue = Issue.issue_objects.filter(pk=issue_id).first()
related_issue_ids = IssueRelation.objects.filter(
Q(related_issue=issue) | Q(issue=issue)
).values_list(
"issue_id", "related_issue_id"
).distinct()
issues = self.filter_issues_excluding_related_issues(issue_id, issues)
related_issue_ids = [item for sublist in related_issue_ids for item in sublist]
if issue:
issues = issues.filter(
~Q(pk=issue_id),
~Q(pk__in=related_issue_ids),
)
if sub_issue == "true" and issue_id:
issue = Issue.issue_objects.filter(pk=issue_id).first()
if issue:
issues = issues.filter(~Q(pk=issue_id), parent__isnull=True)
if issue.parent:
issues = issues.filter(~Q(pk=issue.parent_id))
issues = self.filter_root_issues_only(issue_id, issues)
if cycle == "true":
issues = issues.exclude(
Q(issue_cycle__isnull=False) & Q(issue_cycle__deleted_at__isnull=True)
)
issues = self.exclude_issues_in_cycles(issues)
if module:
issues = issues.exclude(
Q(issue_module__module=module)
& Q(issue_module__deleted_at__isnull=True)
)
issues = self.exclude_issues_in_module(issues, module)
if target_date == "none":
issues = issues.filter(target_date__isnull=True)
issues = self.filter_issues_without_target_date(issues)
if ProjectMember.objects.filter(
project_id=project_id, member=self.request.user, is_active=True, role=5

View File

@@ -24,125 +24,152 @@ class TimezoneEndpoint(APIView):
@method_decorator(cache_page(60 * 60 * 2))
def get(self, request):
timezone_locations = [
('Midway Island', 'Pacific/Midway'), # UTC-11:00
('American Samoa', 'Pacific/Pago_Pago'), # UTC-11:00
('Hawaii', 'Pacific/Honolulu'), # UTC-10:00
('Aleutian Islands', 'America/Adak'), # UTC-10:00 (DST: UTC-09:00)
('Marquesas Islands', 'Pacific/Marquesas'), # UTC-09:30
('Alaska', 'America/Anchorage'), # UTC-09:00 (DST: UTC-08:00)
('Gambier Islands', 'Pacific/Gambier'), # UTC-09:00
('Pacific Time (US and Canada)', 'America/Los_Angeles'), # UTC-08:00 (DST: UTC-07:00)
('Baja California', 'America/Tijuana'), # UTC-08:00 (DST: UTC-07:00)
('Mountain Time (US and Canada)', 'America/Denver'), # UTC-07:00 (DST: UTC-06:00)
('Arizona', 'America/Phoenix'), # UTC-07:00
('Chihuahua, Mazatlan', 'America/Chihuahua'), # UTC-07:00 (DST: UTC-06:00)
('Central Time (US and Canada)', 'America/Chicago'), # UTC-06:00 (DST: UTC-05:00)
('Saskatchewan', 'America/Regina'), # UTC-06:00
('Guadalajara, Mexico City, Monterrey', 'America/Mexico_City'), # UTC-06:00 (DST: UTC-05:00)
('Tegucigalpa, Honduras', 'America/Tegucigalpa'), # UTC-06:00
('Costa Rica', 'America/Costa_Rica'), # UTC-06:00
('Eastern Time (US and Canada)', 'America/New_York'), # UTC-05:00 (DST: UTC-04:00)
('Lima', 'America/Lima'), # UTC-05:00
('Bogota', 'America/Bogota'), # UTC-05:00
('Quito', 'America/Guayaquil'), # UTC-05:00
('Chetumal', 'America/Cancun'), # UTC-05:00 (DST: UTC-04:00)
('Caracas (Old Venezuela Time)', 'America/Caracas'), # UTC-04:30
('Atlantic Time (Canada)', 'America/Halifax'), # UTC-04:00 (DST: UTC-03:00)
('Caracas', 'America/Caracas'), # UTC-04:00
('Santiago', 'America/Santiago'), # UTC-04:00 (DST: UTC-03:00)
('La Paz', 'America/La_Paz'), # UTC-04:00
('Manaus', 'America/Manaus'), # UTC-04:00
('Georgetown', 'America/Guyana'), # UTC-04:00
('Bermuda', 'Atlantic/Bermuda'), # UTC-04:00 (DST: UTC-03:00)
('Newfoundland Time (Canada)', 'America/St_Johns'), # UTC-03:30 (DST: UTC-02:30)
('Buenos Aires', 'America/Argentina/Buenos_Aires'), # UTC-03:00
('Brasilia', 'America/Sao_Paulo'), # UTC-03:00
('Greenland', 'America/Godthab'), # UTC-03:00 (DST: UTC-02:00)
('Montevideo', 'America/Montevideo'), # UTC-03:00
('Falkland Islands', 'Atlantic/Stanley'), # UTC-03:00
('South Georgia and the South Sandwich Islands', 'Atlantic/South_Georgia'), # UTC-02:00
('Azores', 'Atlantic/Azores'), # UTC-01:00 (DST: UTC+00:00)
('Cape Verde Islands', 'Atlantic/Cape_Verde'), # UTC-01:00
('Dublin', 'Europe/Dublin'), # UTC+00:00 (DST: UTC+01:00)
('Reykjavik', 'Atlantic/Reykjavik'), # UTC+00:00
('Lisbon', 'Europe/Lisbon'), # UTC+00:00 (DST: UTC+01:00)
('Monrovia', 'Africa/Monrovia'), # UTC+00:00
('Casablanca', 'Africa/Casablanca'), # UTC+00:00 (DST: UTC+01:00)
('Central European Time (Berlin, Rome, Paris)', 'Europe/Paris'), # UTC+01:00 (DST: UTC+02:00)
('West Central Africa', 'Africa/Lagos'), # UTC+01:00
('Algiers', 'Africa/Algiers'), # UTC+01:00
('Lagos', 'Africa/Lagos'), # UTC+01:00
('Tunis', 'Africa/Tunis'), # UTC+01:00
('Eastern European Time (Cairo, Helsinki, Kyiv)', 'Europe/Kiev'), # UTC+02:00 (DST: UTC+03:00)
('Athens', 'Europe/Athens'), # UTC+02:00 (DST: UTC+03:00)
('Jerusalem', 'Asia/Jerusalem'), # UTC+02:00 (DST: UTC+03:00)
('Johannesburg', 'Africa/Johannesburg'), # UTC+02:00
('Harare, Pretoria', 'Africa/Harare'), # UTC+02:00
('Moscow Time', 'Europe/Moscow'), # UTC+03:00
('Baghdad', 'Asia/Baghdad'), # UTC+03:00
('Nairobi', 'Africa/Nairobi'), # UTC+03:00
('Kuwait, Riyadh', 'Asia/Riyadh'), # UTC+03:00
('Tehran', 'Asia/Tehran'), # UTC+03:30 (DST: UTC+04:30)
('Abu Dhabi', 'Asia/Dubai'), # UTC+04:00
('Baku', 'Asia/Baku'), # UTC+04:00 (DST: UTC+05:00)
('Yerevan', 'Asia/Yerevan'), # UTC+04:00 (DST: UTC+05:00)
('Astrakhan', 'Europe/Astrakhan'), # UTC+04:00
('Tbilisi', 'Asia/Tbilisi'), # UTC+04:00
('Mauritius', 'Indian/Mauritius'), # UTC+04:00
('Islamabad', 'Asia/Karachi'), # UTC+05:00
('Karachi', 'Asia/Karachi'), # UTC+05:00
('Tashkent', 'Asia/Tashkent'), # UTC+05:00
('Yekaterinburg', 'Asia/Yekaterinburg'), # UTC+05:00
('Maldives', 'Indian/Maldives'), # UTC+05:00
('Chagos', 'Indian/Chagos'), # UTC+05:00
('Chennai', 'Asia/Kolkata'), # UTC+05:30
('Kolkata', 'Asia/Kolkata'), # UTC+05:30
('Mumbai', 'Asia/Kolkata'), # UTC+05:30
('New Delhi', 'Asia/Kolkata'), # UTC+05:30
('Sri Jayawardenepura', 'Asia/Colombo'), # UTC+05:30
('Kathmandu', 'Asia/Kathmandu'), # UTC+05:45
('Dhaka', 'Asia/Dhaka'), # UTC+06:00
('Almaty', 'Asia/Almaty'), # UTC+06:00
('Bishkek', 'Asia/Bishkek'), # UTC+06:00
('Thimphu', 'Asia/Thimphu'), # UTC+06:00
('Yangon (Rangoon)', 'Asia/Yangon'), # UTC+06:30
('Cocos Islands', 'Indian/Cocos'), # UTC+06:30
('Bangkok', 'Asia/Bangkok'), # UTC+07:00
('Hanoi', 'Asia/Ho_Chi_Minh'), # UTC+07:00
('Jakarta', 'Asia/Jakarta'), # UTC+07:00
('Novosibirsk', 'Asia/Novosibirsk'), # UTC+07:00
('Krasnoyarsk', 'Asia/Krasnoyarsk'), # UTC+07:00
('Beijing', 'Asia/Shanghai'), # UTC+08:00
('Singapore', 'Asia/Singapore'), # UTC+08:00
('Perth', 'Australia/Perth'), # UTC+08:00
('Hong Kong', 'Asia/Hong_Kong'), # UTC+08:00
('Ulaanbaatar', 'Asia/Ulaanbaatar'), # UTC+08:00
('Palau', 'Pacific/Palau'), # UTC+08:00
('Eucla', 'Australia/Eucla'), # UTC+08:45
('Tokyo', 'Asia/Tokyo'), # UTC+09:00
('Seoul', 'Asia/Seoul'), # UTC+09:00
('Yakutsk', 'Asia/Yakutsk'), # UTC+09:00
('Adelaide', 'Australia/Adelaide'), # UTC+09:30 (DST: UTC+10:30)
('Darwin', 'Australia/Darwin'), # UTC+09:30
('Sydney', 'Australia/Sydney'), # UTC+10:00 (DST: UTC+11:00)
('Brisbane', 'Australia/Brisbane'), # UTC+10:00
('Guam', 'Pacific/Guam'), # UTC+10:00
('Vladivostok', 'Asia/Vladivostok'), # UTC+10:00
('Tahiti', 'Pacific/Tahiti'), # UTC+10:00
('Lord Howe Island', 'Australia/Lord_Howe'), # UTC+10:30 (DST: UTC+11:00)
('Solomon Islands', 'Pacific/Guadalcanal'), # UTC+11:00
('Magadan', 'Asia/Magadan'), # UTC+11:00
('Norfolk Island', 'Pacific/Norfolk'), # UTC+11:00
('Bougainville Island', 'Pacific/Bougainville'), # UTC+11:00
('Chokurdakh', 'Asia/Srednekolymsk'), # UTC+11:00
('Auckland', 'Pacific/Auckland'), # UTC+12:00 (DST: UTC+13:00)
('Wellington', 'Pacific/Auckland'), # UTC+12:00 (DST: UTC+13:00)
('Fiji Islands', 'Pacific/Fiji'), # UTC+12:00 (DST: UTC+13:00)
('Anadyr', 'Asia/Anadyr'), # UTC+12:00
('Chatham Islands', 'Pacific/Chatham'), # UTC+12:45 (DST: UTC+13:45)
("Nuku'alofa", 'Pacific/Tongatapu'), # UTC+13:00
('Samoa', 'Pacific/Apia'), # UTC+13:00 (DST: UTC+14:00)
('Kiritimati Island', 'Pacific/Kiritimati') # UTC+14:00
("Midway Island", "Pacific/Midway"), # UTC-11:00
("American Samoa", "Pacific/Pago_Pago"), # UTC-11:00
("Hawaii", "Pacific/Honolulu"), # UTC-10:00
("Aleutian Islands", "America/Adak"), # UTC-10:00 (DST: UTC-09:00)
("Marquesas Islands", "Pacific/Marquesas"), # UTC-09:30
("Alaska", "America/Anchorage"), # UTC-09:00 (DST: UTC-08:00)
("Gambier Islands", "Pacific/Gambier"), # UTC-09:00
(
"Pacific Time (US and Canada)",
"America/Los_Angeles",
), # UTC-08:00 (DST: UTC-07:00)
("Baja California", "America/Tijuana"), # UTC-08:00 (DST: UTC-07:00)
(
"Mountain Time (US and Canada)",
"America/Denver",
), # UTC-07:00 (DST: UTC-06:00)
("Arizona", "America/Phoenix"), # UTC-07:00
("Chihuahua, Mazatlan", "America/Chihuahua"), # UTC-07:00 (DST: UTC-06:00)
(
"Central Time (US and Canada)",
"America/Chicago",
), # UTC-06:00 (DST: UTC-05:00)
("Saskatchewan", "America/Regina"), # UTC-06:00
(
"Guadalajara, Mexico City, Monterrey",
"America/Mexico_City",
), # UTC-06:00 (DST: UTC-05:00)
("Tegucigalpa, Honduras", "America/Tegucigalpa"), # UTC-06:00
("Costa Rica", "America/Costa_Rica"), # UTC-06:00
(
"Eastern Time (US and Canada)",
"America/New_York",
), # UTC-05:00 (DST: UTC-04:00)
("Lima", "America/Lima"), # UTC-05:00
("Bogota", "America/Bogota"), # UTC-05:00
("Quito", "America/Guayaquil"), # UTC-05:00
("Chetumal", "America/Cancun"), # UTC-05:00 (DST: UTC-04:00)
("Caracas (Old Venezuela Time)", "America/Caracas"), # UTC-04:30
("Atlantic Time (Canada)", "America/Halifax"), # UTC-04:00 (DST: UTC-03:00)
("Caracas", "America/Caracas"), # UTC-04:00
("Santiago", "America/Santiago"), # UTC-04:00 (DST: UTC-03:00)
("La Paz", "America/La_Paz"), # UTC-04:00
("Manaus", "America/Manaus"), # UTC-04:00
("Georgetown", "America/Guyana"), # UTC-04:00
("Bermuda", "Atlantic/Bermuda"), # UTC-04:00 (DST: UTC-03:00)
(
"Newfoundland Time (Canada)",
"America/St_Johns",
), # UTC-03:30 (DST: UTC-02:30)
("Buenos Aires", "America/Argentina/Buenos_Aires"), # UTC-03:00
("Brasilia", "America/Sao_Paulo"), # UTC-03:00
("Greenland", "America/Godthab"), # UTC-03:00 (DST: UTC-02:00)
("Montevideo", "America/Montevideo"), # UTC-03:00
("Falkland Islands", "Atlantic/Stanley"), # UTC-03:00
(
"South Georgia and the South Sandwich Islands",
"Atlantic/South_Georgia",
), # UTC-02:00
("Azores", "Atlantic/Azores"), # UTC-01:00 (DST: UTC+00:00)
("Cape Verde Islands", "Atlantic/Cape_Verde"), # UTC-01:00
("Dublin", "Europe/Dublin"), # UTC+00:00 (DST: UTC+01:00)
("Reykjavik", "Atlantic/Reykjavik"), # UTC+00:00
("Lisbon", "Europe/Lisbon"), # UTC+00:00 (DST: UTC+01:00)
("Monrovia", "Africa/Monrovia"), # UTC+00:00
("Casablanca", "Africa/Casablanca"), # UTC+00:00 (DST: UTC+01:00)
(
"Central European Time (Berlin, Rome, Paris)",
"Europe/Paris",
), # UTC+01:00 (DST: UTC+02:00)
("West Central Africa", "Africa/Lagos"), # UTC+01:00
("Algiers", "Africa/Algiers"), # UTC+01:00
("Lagos", "Africa/Lagos"), # UTC+01:00
("Tunis", "Africa/Tunis"), # UTC+01:00
(
"Eastern European Time (Cairo, Helsinki, Kyiv)",
"Europe/Kiev",
), # UTC+02:00 (DST: UTC+03:00)
("Athens", "Europe/Athens"), # UTC+02:00 (DST: UTC+03:00)
("Jerusalem", "Asia/Jerusalem"), # UTC+02:00 (DST: UTC+03:00)
("Johannesburg", "Africa/Johannesburg"), # UTC+02:00
("Harare, Pretoria", "Africa/Harare"), # UTC+02:00
("Moscow Time", "Europe/Moscow"), # UTC+03:00
("Baghdad", "Asia/Baghdad"), # UTC+03:00
("Nairobi", "Africa/Nairobi"), # UTC+03:00
("Kuwait, Riyadh", "Asia/Riyadh"), # UTC+03:00
("Tehran", "Asia/Tehran"), # UTC+03:30 (DST: UTC+04:30)
("Abu Dhabi", "Asia/Dubai"), # UTC+04:00
("Baku", "Asia/Baku"), # UTC+04:00 (DST: UTC+05:00)
("Yerevan", "Asia/Yerevan"), # UTC+04:00 (DST: UTC+05:00)
("Astrakhan", "Europe/Astrakhan"), # UTC+04:00
("Tbilisi", "Asia/Tbilisi"), # UTC+04:00
("Mauritius", "Indian/Mauritius"), # UTC+04:00
("Islamabad", "Asia/Karachi"), # UTC+05:00
("Karachi", "Asia/Karachi"), # UTC+05:00
("Tashkent", "Asia/Tashkent"), # UTC+05:00
("Yekaterinburg", "Asia/Yekaterinburg"), # UTC+05:00
("Maldives", "Indian/Maldives"), # UTC+05:00
("Chagos", "Indian/Chagos"), # UTC+05:00
("Chennai", "Asia/Kolkata"), # UTC+05:30
("Kolkata", "Asia/Kolkata"), # UTC+05:30
("Mumbai", "Asia/Kolkata"), # UTC+05:30
("New Delhi", "Asia/Kolkata"), # UTC+05:30
("Sri Jayawardenepura", "Asia/Colombo"), # UTC+05:30
("Kathmandu", "Asia/Kathmandu"), # UTC+05:45
("Dhaka", "Asia/Dhaka"), # UTC+06:00
("Almaty", "Asia/Almaty"), # UTC+06:00
("Bishkek", "Asia/Bishkek"), # UTC+06:00
("Thimphu", "Asia/Thimphu"), # UTC+06:00
("Yangon (Rangoon)", "Asia/Yangon"), # UTC+06:30
("Cocos Islands", "Indian/Cocos"), # UTC+06:30
("Bangkok", "Asia/Bangkok"), # UTC+07:00
("Hanoi", "Asia/Ho_Chi_Minh"), # UTC+07:00
("Jakarta", "Asia/Jakarta"), # UTC+07:00
("Novosibirsk", "Asia/Novosibirsk"), # UTC+07:00
("Krasnoyarsk", "Asia/Krasnoyarsk"), # UTC+07:00
("Beijing", "Asia/Shanghai"), # UTC+08:00
("Singapore", "Asia/Singapore"), # UTC+08:00
("Perth", "Australia/Perth"), # UTC+08:00
("Hong Kong", "Asia/Hong_Kong"), # UTC+08:00
("Ulaanbaatar", "Asia/Ulaanbaatar"), # UTC+08:00
("Palau", "Pacific/Palau"), # UTC+08:00
("Eucla", "Australia/Eucla"), # UTC+08:45
("Tokyo", "Asia/Tokyo"), # UTC+09:00
("Seoul", "Asia/Seoul"), # UTC+09:00
("Yakutsk", "Asia/Yakutsk"), # UTC+09:00
("Adelaide", "Australia/Adelaide"), # UTC+09:30 (DST: UTC+10:30)
("Darwin", "Australia/Darwin"), # UTC+09:30
("Sydney", "Australia/Sydney"), # UTC+10:00 (DST: UTC+11:00)
("Brisbane", "Australia/Brisbane"), # UTC+10:00
("Guam", "Pacific/Guam"), # UTC+10:00
("Vladivostok", "Asia/Vladivostok"), # UTC+10:00
("Tahiti", "Pacific/Tahiti"), # UTC+10:00
("Lord Howe Island", "Australia/Lord_Howe"), # UTC+10:30 (DST: UTC+11:00)
("Solomon Islands", "Pacific/Guadalcanal"), # UTC+11:00
("Magadan", "Asia/Magadan"), # UTC+11:00
("Norfolk Island", "Pacific/Norfolk"), # UTC+11:00
("Bougainville Island", "Pacific/Bougainville"), # UTC+11:00
("Chokurdakh", "Asia/Srednekolymsk"), # UTC+11:00
("Auckland", "Pacific/Auckland"), # UTC+12:00 (DST: UTC+13:00)
("Wellington", "Pacific/Auckland"), # UTC+12:00 (DST: UTC+13:00)
("Fiji Islands", "Pacific/Fiji"), # UTC+12:00 (DST: UTC+13:00)
("Anadyr", "Asia/Anadyr"), # UTC+12:00
("Chatham Islands", "Pacific/Chatham"), # UTC+12:45 (DST: UTC+13:45)
("Nuku'alofa", "Pacific/Tongatapu"), # UTC+13:00
("Samoa", "Pacific/Apia"), # UTC+13:00 (DST: UTC+14:00)
("Kiritimati Island", "Pacific/Kiritimati"), # UTC+14:00
]
timezone_list = []
@@ -150,7 +177,6 @@ class TimezoneEndpoint(APIView):
# Process timezone mapping
for friendly_name, tz_identifier in timezone_locations:
try:
tz = pytz.timezone(tz_identifier)
current_offset = now.astimezone(tz).strftime("%z")
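This hunk is where the endpoint stops trusting the hard-coded offset comments above and computes the current UTC offset per zone at request time, which keeps DST transitions correct. A minimal standalone sketch of the same pattern (the two-entry list is illustrative):

    from datetime import datetime, timezone as dt_timezone
    import pytz

    timezone_locations = [
        ("Tokyo", "Asia/Tokyo"),
        ("Chatham Islands", "Pacific/Chatham"),
    ]

    now = datetime.now(dt_timezone.utc)
    for friendly_name, tz_identifier in timezone_locations:
        tz = pytz.timezone(tz_identifier)
        # strftime("%z") yields e.g. "+0900"; reformat it for display
        offset = now.astimezone(tz).strftime("%z")
        print(f"{friendly_name}: UTC{offset[:3]}:{offset[3:]}")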

View File

@@ -3,6 +3,7 @@ import csv
import io
import os
from datetime import date
import uuid
from dateutil.relativedelta import relativedelta
from django.db import IntegrityError
@@ -35,6 +36,7 @@ from plane.db.models import (
Workspace,
WorkspaceMember,
WorkspaceTheme,
Profile,
)
from plane.app.permissions import ROLE, allow_permission
from django.utils.decorators import method_decorator
@@ -43,6 +45,7 @@ from django.views.decorators.vary import vary_on_cookie
from plane.utils.constants import RESTRICTED_WORKSPACE_SLUGS
from plane.license.utils.instance_value import get_configuration_value
from plane.bgtasks.workspace_seed_task import workspace_seed
from plane.utils.url import contains_url
class WorkSpaceViewSet(BaseViewSet):
@@ -109,6 +112,12 @@ class WorkSpaceViewSet(BaseViewSet):
status=status.HTTP_400_BAD_REQUEST,
)
if contains_url(name):
return Response(
{"error": "Name cannot contain a URL"},
status=status.HTTP_400_BAD_REQUEST,
)
if serializer.is_valid(raise_exception=True):
serializer.save(owner=request.user)
# Create Workspace member
@@ -150,8 +159,18 @@ class WorkSpaceViewSet(BaseViewSet):
def partial_update(self, request, *args, **kwargs):
return super().partial_update(request, *args, **kwargs)
def remove_last_workspace_ids_from_user_settings(self, id: uuid.UUID) -> None:
"""
Remove the last workspace id from the user settings
"""
Profile.objects.filter(last_workspace_id=id).update(last_workspace_id=None)
return
@allow_permission([ROLE.ADMIN], level="WORKSPACE")
def destroy(self, request, *args, **kwargs):
# Get the workspace
workspace = self.get_object()
self.remove_last_workspace_ids_from_user_settings(workspace.id)
return super().destroy(request, *args, **kwargs)
@@ -159,8 +178,6 @@ class UserWorkSpacesEndpoint(BaseAPIView):
search_fields = ["name"]
filterset_fields = ["owner"]
@method_decorator(cache_control(private=True, max_age=12))
@method_decorator(vary_on_cookie)
def get(self, request):
fields = [field for field in request.GET.get("fields", "").split(",") if field]
member_count = (

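The new guard rejects workspace names that embed links. The actual `contains_url` lives in `plane.utils.url` and is not shown in this diff; a plausible minimal stand-in, for illustration only:

    import re

    # Hypothetical sketch of plane.utils.url.contains_url, which this diff
    # imports but does not show: flag anything with a scheme or a www link.
    URL_PATTERN = re.compile(r"(https?://|www\.)\S+", re.IGNORECASE)

    def contains_url(text: str) -> bool:
        return bool(text and URL_PATTERN.search(text))

    assert contains_url("my team https://evil.example")
    assert not contains_url("Design Team")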
View File

@@ -12,6 +12,7 @@ from plane.app.permissions import WorkspaceViewerPermission
from plane.app.serializers.cycle import CycleSerializer
from plane.utils.timezone_converter import user_timezone_converter
class WorkspaceCyclesEndpoint(BaseAPIView):
permission_classes = [WorkspaceViewerPermission]

View File

@@ -38,6 +38,7 @@ from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.issue_filters import issue_filters
from plane.utils.host import base_host
class WorkspaceDraftIssueViewSet(BaseViewSet):
model = DraftIssue

View File

@@ -1,5 +1,6 @@
# Django imports
from django.db.models import Count, Q, OuterRef, Subquery, IntegerField
from django.utils import timezone
from django.db.models.functions import Coalesce
# Third party modules
@@ -133,7 +134,7 @@ class WorkSpaceMemberViewSet(BaseViewSet):
# Deactivate the users from the projects where the user is part of
_ = ProjectMember.objects.filter(
workspace__slug=slug, member_id=workspace_member.member_id, is_active=True
).update(is_active=False)
).update(is_active=False, updated_at=timezone.now())
workspace_member.is_active = False
workspace_member.save()
@@ -194,7 +195,7 @@ class WorkSpaceMemberViewSet(BaseViewSet):
# # Deactivate the users from the projects where the user is part of
_ = ProjectMember.objects.filter(
workspace__slug=slug, member_id=workspace_member.member_id, is_active=True
).update(is_active=False)
).update(is_active=False, updated_at=timezone.now())
# # Deactivate the user
workspace_member.is_active = False
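Passing `updated_at=timezone.now()` here is deliberate: `QuerySet.update()` issues a single SQL UPDATE and never calls `Model.save()`, so `auto_now` fields are not refreshed on bulk updates. A sketch with a hypothetical stand-in model:

    from django.db import models
    from django.utils import timezone

    class Membership(models.Model):  # hypothetical stand-in
        is_active = models.BooleanField(default=True)
        updated_at = models.DateTimeField(auto_now=True)  # only applied by save()

    # Bulk deactivation must supply the timestamp by hand:
    Membership.objects.filter(is_active=True).update(
        is_active=False, updated_at=timezone.now()
    )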

View File

@@ -27,10 +27,7 @@ class WorkspaceUserPreferenceViewSet(BaseAPIView):
create_preference_keys = []
keys = [
key
for key, _ in WorkspaceUserPreference.UserPreferenceKeys.choices
]
keys = [key for key, _ in WorkspaceUserPreference.UserPreferenceKeys.choices]
for preference in keys:
if preference not in get_preference.values_list("key", flat=True):
@@ -39,7 +36,10 @@ class WorkspaceUserPreferenceViewSet(BaseAPIView):
preference = WorkspaceUserPreference.objects.bulk_create(
[
WorkspaceUserPreference(
key=key, user=request.user, workspace=workspace, sort_order=(65535 + (i*10000))
key=key,
user=request.user,
workspace=workspace,
sort_order=(65535 + (i * 10000)),
)
for i, key in enumerate(create_preference_keys)
],
@@ -47,10 +47,13 @@ class WorkspaceUserPreferenceViewSet(BaseAPIView):
ignore_conflicts=True,
)
preferences = WorkspaceUserPreference.objects.filter(
user=request.user, workspace_id=workspace.id
).order_by("sort_order").values("key", "is_pinned", "sort_order")
preferences = (
WorkspaceUserPreference.objects.filter(
user=request.user, workspace_id=workspace.id
)
.order_by("sort_order")
.values("key", "is_pinned", "sort_order")
)
user_preferences = {}
@@ -58,7 +61,7 @@ class WorkspaceUserPreferenceViewSet(BaseAPIView):
user_preferences[(str(preference["key"]))] = {
"is_pinned": preference["is_pinned"],
"sort_order": preference["sort_order"],
}
}
return Response(
user_preferences,
status=status.HTTP_200_OK,

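Behavior is unchanged by the reformatting: every missing preference key is created with sort orders spaced 10000 apart from a 65535 base, and `ignore_conflicts=True` keeps the bulk insert safe if two requests race. The spacing on its own (keys are illustrative):

    create_preference_keys = ["home", "your_work", "drafts"]

    # Wide gaps allow inserting a preference between two neighbours later
    # without renumbering the whole list.
    sort_orders = [65535 + (i * 10000) for i, _ in enumerate(create_preference_keys)]
    print(sort_orders)  # [65535, 75535, 85535]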
View File

@@ -18,6 +18,7 @@ from plane.bgtasks.user_activation_email_task import user_activation_email
from plane.utils.host import base_host
from plane.utils.ip_address import get_client_ip
class Adapter:
"""Common interface for all auth providers"""

View File

@@ -41,7 +41,6 @@ AUTHENTICATION_ERROR_CODES = {
"GOOGLE_OAUTH_PROVIDER_ERROR": 5115,
"GITHUB_OAUTH_PROVIDER_ERROR": 5120,
"GITLAB_OAUTH_PROVIDER_ERROR": 5121,
# Reset Password
"INVALID_PASSWORD_TOKEN": 5125,
"EXPIRED_PASSWORD_TOKEN": 5130,

View File

@@ -25,23 +25,24 @@ class GitHubOAuthProvider(OauthAdapter):
organization_scope = "read:org"
def __init__(self, request, code=None, state=None, callback=None):
GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET, GITHUB_ORGANIZATION_ID = get_configuration_value(
[
{
"key": "GITHUB_CLIENT_ID",
"default": os.environ.get("GITHUB_CLIENT_ID"),
},
{
"key": "GITHUB_CLIENT_SECRET",
"default": os.environ.get("GITHUB_CLIENT_SECRET"),
},
{
"key": "GITHUB_ORGANIZATION_ID",
"default": os.environ.get("GITHUB_ORGANIZATION_ID"),
},
]
GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET, GITHUB_ORGANIZATION_ID = (
get_configuration_value(
[
{
"key": "GITHUB_CLIENT_ID",
"default": os.environ.get("GITHUB_CLIENT_ID"),
},
{
"key": "GITHUB_CLIENT_SECRET",
"default": os.environ.get("GITHUB_CLIENT_SECRET"),
},
{
"key": "GITHUB_ORGANIZATION_ID",
"default": os.environ.get("GITHUB_ORGANIZATION_ID"),
},
]
)
)
if not (GITHUB_CLIENT_ID and GITHUB_CLIENT_SECRET):
@@ -128,7 +129,10 @@ class GitHubOAuthProvider(OauthAdapter):
def is_user_in_organization(self, github_username):
headers = {"Authorization": f"Bearer {self.token_data.get('access_token')}"}
response = requests.get(f"{self.org_membership_url}/{self.organization_id}/memberships/{github_username}", headers=headers)
response = requests.get(
f"{self.org_membership_url}/{self.organization_id}/memberships/{github_username}",
headers=headers,
)
return response.status_code == 200 # 200 means the user is a member
def set_user_data(self):
@@ -145,7 +149,6 @@ class GitHubOAuthProvider(OauthAdapter):
error_message="GITHUB_USER_NOT_IN_ORG",
)
email = self.__get_email(headers=headers)
super().set_user_data(
{

View File

@@ -42,11 +42,11 @@ urlpatterns = [
# credentials
path("sign-in/", SignInAuthEndpoint.as_view(), name="sign-in"),
path("sign-up/", SignUpAuthEndpoint.as_view(), name="sign-up"),
path("spaces/sign-in/", SignInAuthSpaceEndpoint.as_view(), name="sign-in"),
path("spaces/sign-up/", SignUpAuthSpaceEndpoint.as_view(), name="sign-in"),
path("spaces/sign-in/", SignInAuthSpaceEndpoint.as_view(), name="space-sign-in"),
path("spaces/sign-up/", SignUpAuthSpaceEndpoint.as_view(), name="space-sign-up"),
# signout
path("sign-out/", SignOutAuthEndpoint.as_view(), name="sign-out"),
path("spaces/sign-out/", SignOutAuthSpaceEndpoint.as_view(), name="sign-out"),
path("spaces/sign-out/", SignOutAuthSpaceEndpoint.as_view(), name="space-sign-out"),
# csrf token
path("get-csrf-token/", CSRFTokenEndpoint.as_view(), name="get_csrf_token"),
# Magic sign in
@@ -56,17 +56,17 @@ urlpatterns = [
path(
"spaces/magic-generate/",
MagicGenerateSpaceEndpoint.as_view(),
name="magic-generate",
name="space-magic-generate",
),
path(
"spaces/magic-sign-in/",
MagicSignInSpaceEndpoint.as_view(),
name="magic-sign-in",
name="space-magic-sign-in",
),
path(
"spaces/magic-sign-up/",
MagicSignUpSpaceEndpoint.as_view(),
name="magic-sign-up",
name="space-magic-sign-up",
),
## Google Oauth
path("google/", GoogleOauthInitiateEndpoint.as_view(), name="google-initiate"),
@@ -74,12 +74,12 @@ urlpatterns = [
path(
"spaces/google/",
GoogleOauthInitiateSpaceEndpoint.as_view(),
name="google-initiate",
name="space-google-initiate",
),
path(
"google/callback/",
"spaces/google/callback/",
GoogleCallbackSpaceEndpoint.as_view(),
name="google-callback",
name="space-google-callback",
),
## Github Oauth
path("github/", GitHubOauthInitiateEndpoint.as_view(), name="github-initiate"),
@@ -87,12 +87,12 @@ urlpatterns = [
path(
"spaces/github/",
GitHubOauthInitiateSpaceEndpoint.as_view(),
name="github-initiate",
name="space-github-initiate",
),
path(
"spaces/github/callback/",
GitHubCallbackSpaceEndpoint.as_view(),
name="github-callback",
name="space-github-callback",
),
## Gitlab Oauth
path("gitlab/", GitLabOauthInitiateEndpoint.as_view(), name="gitlab-initiate"),
@@ -100,12 +100,12 @@ urlpatterns = [
path(
"spaces/gitlab/",
GitLabOauthInitiateSpaceEndpoint.as_view(),
name="gitlab-initiate",
name="space-gitlab-initiate",
),
path(
"spaces/gitlab/callback/",
GitLabCallbackSpaceEndpoint.as_view(),
name="gitlab-callback",
name="space-gitlab-callback",
),
# Email Check
path("email-check/", EmailCheckEndpoint.as_view(), name="email-check"),
@@ -120,12 +120,12 @@ urlpatterns = [
path(
"spaces/forgot-password/",
ForgotPasswordSpaceEndpoint.as_view(),
name="forgot-password",
name="space-forgot-password",
),
path(
"spaces/reset-password/<uidb64>/<token>/",
ResetPasswordSpaceEndpoint.as_view(),
name="forgot-password",
name="space-forgot-password",
),
path("change-password/", ChangePasswordEndpoint.as_view(), name="forgot-password"),
path("set-password/", SetUserPasswordEndpoint.as_view(), name="set-password"),

View File

@@ -1,20 +1,29 @@
# Django imports
from django.conf import settings
from django.http import HttpRequest
# Third party imports
from rest_framework.request import Request
# Module imports
from plane.utils.ip_address import get_client_ip
def base_host(request: Request | HttpRequest, is_admin: bool = False, is_space: bool = False, is_app: bool = False) -> str:
def base_host(
request: Request | HttpRequest,
is_admin: bool = False,
is_space: bool = False,
is_app: bool = False,
) -> str:
"""Utility function to return host / origin from the request"""
# Calculate the base origin from request
base_origin = settings.WEB_URL or settings.APP_BASE_URL
# Admin redirections
# Admin redirection
if is_admin:
admin_base_path = getattr(settings, "ADMIN_BASE_PATH", "/god-mode/")
admin_base_path = getattr(settings, "ADMIN_BASE_PATH", None)
if not isinstance(admin_base_path, str):
admin_base_path = "/god-mode/"
if not admin_base_path.startswith("/"):
admin_base_path = "/" + admin_base_path
if not admin_base_path.endswith("/"):
@@ -25,9 +34,11 @@ def base_host(request: Request | HttpRequest, is_admin: bool = False, is_space:
else:
return base_origin + admin_base_path
# Space redirections
# Space redirection
if is_space:
space_base_path = getattr(settings, "SPACE_BASE_PATH", "/spaces/")
space_base_path = getattr(settings, "SPACE_BASE_PATH", None)
if not isinstance(space_base_path, str):
space_base_path = "/spaces/"
if not space_base_path.startswith("/"):
space_base_path = "/" + space_base_path
if not space_base_path.endswith("/"):

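The new guards make `base_host` tolerant of misconfiguration: a base path that is unset or not a string falls back to the default, and the value always ends up with exactly one leading and one trailing slash. The normalization in isolation, as a sketch independent of Django settings:

    def normalize_base_path(value, default):
        if not isinstance(value, str):
            value = default
        if not value.startswith("/"):
            value = "/" + value
        if not value.endswith("/"):
            value = value + "/"
        return value

    assert normalize_base_path(None, "/spaces/") == "/spaces/"
    assert normalize_base_path("god-mode", "/god-mode/") == "/god-mode/"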
View File

@@ -6,6 +6,7 @@ from django.conf import settings
from plane.utils.host import base_host
from plane.utils.ip_address import get_client_ip
def user_login(request, user, is_app=False, is_admin=False, is_space=False):
login(request=request, user=user)

View File

@@ -21,6 +21,7 @@ from plane.authentication.adapter.error import (
)
from plane.utils.path_validator import validate_next_path
class SignInAuthEndpoint(View):
def post(self, request):
next_path = request.POST.get("next_path")

View File

@@ -18,6 +18,7 @@ from plane.authentication.adapter.error import (
)
from plane.utils.path_validator import validate_next_path
class GitHubOauthInitiateEndpoint(View):
def get(self, request):
# Get host and next path

View File

@@ -18,6 +18,7 @@ from plane.authentication.adapter.error import (
)
from plane.utils.path_validator import validate_next_path
class GitLabOauthInitiateEndpoint(View):
def get(self, request):
# Get host and next path

View File

@@ -20,6 +20,7 @@ from plane.authentication.adapter.error import (
)
from plane.utils.path_validator import validate_next_path
class GoogleOauthInitiateEndpoint(View):
def get(self, request):
request.session["host"] = base_host(request=request, is_app=True)
@@ -95,7 +96,9 @@ class GoogleCallbackEndpoint(View):
# Get the redirection path
path = get_redirection_path(user=user)
# redirect to referer path
url = urljoin(base_host, str(validate_next_path(next_path)) if next_path else path)
url = urljoin(
base_host, str(validate_next_path(next_path)) if next_path else path
)
return HttpResponseRedirect(url)
except AuthenticationException as e:
params = e.get_error_dict()

View File

@@ -53,12 +53,14 @@ class ChangePasswordEndpoint(APIView):
error_message="MISSING_PASSWORD",
payload={"error": "Old password is missing"},
)
return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)
return Response(
exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST
)
# Get the new password
new_password = request.data.get("new_password", False)
if not new_password:
if not new_password:
exc = AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES["MISSING_PASSWORD"],
error_message="MISSING_PASSWORD",
@@ -66,7 +68,6 @@ class ChangePasswordEndpoint(APIView):
)
return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)
# If the user password is not autoset then we need to check the old passwords
if not user.is_password_autoset and not user.check_password(old_password):
exc = AuthenticationException(

View File

@@ -25,6 +25,7 @@ from plane.authentication.adapter.error import (
)
from plane.utils.path_validator import validate_next_path
class MagicGenerateSpaceEndpoint(APIView):
permission_classes = [AllowAny]
@@ -38,7 +39,6 @@ class MagicGenerateSpaceEndpoint(APIView):
)
return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)
email = request.data.get("email", "").strip().lower()
try:
validate_email(email)

View File

@@ -459,8 +459,37 @@ def analytic_export_task(email, data, slug):
csv_buffer = generate_csv_from_rows(rows)
send_export_email(email, slug, csv_buffer, rows)
logging.getLogger("plane").info("Email sent succesfully.")
logging.getLogger("plane.worker").info("Email sent successfully.")
return
except Exception as e:
log_exception(e)
return
@shared_task
def export_analytics_to_csv_email(data, headers, keys, email, slug):
"""
Prepares a CSV from data and sends it as an email attachment.
Parameters:
- data: List of dictionaries (e.g. from .values())
- headers: List of CSV column headers
- keys: Keys to extract from each data item (dict)
- email: Email address to send to
- slug: Used for the filename
"""
try:
# Prepare rows: header + data rows
rows = [headers]
for item in data:
row = [item.get(key, "") for key in keys]
rows.append(row)
# Generate CSV buffer
csv_buffer = generate_csv_from_rows(rows)
# Send email with CSV attachment
send_export_email(email=email, slug=slug, csv_buffer=csv_buffer, rows=rows)
except Exception as e:
log_exception(e)
return
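`generate_csv_from_rows` and `send_export_email` are imported elsewhere in this module and not part of the diff; the contract the new task relies on is simply header-first row lists. A hypothetical minimal version of the CSV half, mirroring the `QUOTE_ALL` style used by `create_csv_file` later in this changeset:

    import csv
    import io

    def generate_csv_from_rows(rows):
        # Hypothetical stand-in for the helper this task calls
        buffer = io.StringIO()
        writer = csv.writer(buffer, delimiter=",", quoting=csv.QUOTE_ALL)
        writer.writerows(rows)
        return buffer

    rows = [["ID", "Name"], ["PROJ-1", "Fix login"]]
    print(generate_csv_from_rows(rows).getvalue())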

View File

@@ -12,6 +12,7 @@ from plane.db.models import FileAsset, Page, Issue
from plane.utils.exception_logger import log_exception
from plane.settings.storage import S3Storage
from celery import shared_task
from plane.utils.url import normalize_url_path
def get_entity_id_field(entity_type, entity_id):
@@ -67,11 +68,14 @@ def sync_with_external_service(entity_name, description_html):
"description_html": description_html,
"variant": "rich" if entity_name == "PAGE" else "document",
}
response = requests.post(
f"{settings.LIVE_BASE_URL}/convert-document/",
json=data,
headers=None,
)
live_url = settings.LIVE_URL
if not live_url:
return {}
url = normalize_url_path(f"{live_url}/convert-document/")
response = requests.post(url, json=data, headers=None)
if response.status_code == 200:
return response.json()
except requests.RequestException as e:

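Interpolating the live URL with an f-string could previously produce double slashes (for example `http://live:3000//convert-document/` when the base ends in `/`); `normalize_url_path` from `plane.utils.url` collapses them. Its implementation is not shown here; a plausible sketch:

    import re
    from urllib.parse import urlsplit, urlunsplit

    def normalize_url_path(url: str) -> str:
        # Hypothetical stand-in: collapse duplicate slashes in the path only,
        # leaving the scheme's "://" untouched.
        parts = urlsplit(url)
        path = re.sub(r"/{2,}", "/", parts.path)
        return urlunsplit((parts.scheme, parts.netloc, path, parts.query, parts.fragment))

    assert (
        normalize_url_path("http://live:3000//convert-document/")
        == "http://live:3000/convert-document/"
    )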
View File

@@ -33,6 +33,7 @@ from plane.db.models import (
Intake,
IntakeIssue,
)
from plane.db.models.intake import SourceType
def create_project(workspace, user_id):
@@ -388,7 +389,7 @@ def create_intake_issues(workspace, project, user_id, intake_issue_count):
if status == 0
else None
),
source="in-app",
source=SourceType.IN_APP,
workspace=workspace,
project=project,
)

View File

@@ -284,6 +284,7 @@ def send_email_notification(
"project": str(issue.project.name),
"user_preference": f"{base_api}/profile/preferences/email",
"comments": comments,
"entity_type": "issue",
}
html_content = render_to_string(
"emails/notifications/issue-updates.html", context
@@ -309,7 +310,7 @@ def send_email_notification(
)
msg.attach_alternative(html_content, "text/html")
msg.send()
logging.getLogger("plane").info("Email Sent Successfully")
logging.getLogger("plane.worker").info("Email Sent Successfully")
# Update the logs
EmailNotificationLog.objects.filter(
@@ -325,7 +326,7 @@ def send_email_notification(
release_lock(lock_id=lock_id)
return
else:
logging.getLogger("plane").info("Duplicate email received skipping")
logging.getLogger("plane.worker").info("Duplicate email received skipping")
return
except (Issue.DoesNotExist, User.DoesNotExist):
release_lock(lock_id=lock_id)

View File

@@ -3,34 +3,49 @@ import csv
import io
import json
import zipfile
from typing import List
import boto3
from botocore.client import Config
from uuid import UUID
from datetime import datetime, date
# Third party imports
from celery import shared_task
# Django imports
from django.conf import settings
from django.utils import timezone
from openpyxl import Workbook
from django.db.models import F, Prefetch
from collections import defaultdict
# Module imports
from plane.db.models import ExporterHistory, Issue
from plane.db.models import ExporterHistory, Issue, FileAsset, Label, User, IssueComment
from plane.utils.exception_logger import log_exception
def dateTimeConverter(time):
def dateTimeConverter(time: datetime) -> str | None:
"""
Convert a datetime object to a formatted string.
"""
if time:
return time.strftime("%a, %d %b %Y %I:%M:%S %Z%z")
def dateConverter(time):
def dateConverter(time: date) -> str | None:
"""
Convert a date object to a formatted string.
"""
if time:
return time.strftime("%a, %d %b %Y")
def create_csv_file(data):
def create_csv_file(data: List[List[str]]) -> str:
"""
Create a CSV file from the provided data.
"""
csv_buffer = io.StringIO()
csv_writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)
@@ -41,11 +56,17 @@ def create_csv_file(data):
return csv_buffer.getvalue()
def create_json_file(data):
def create_json_file(data: List[dict]) -> str:
"""
Create a JSON file from the provided data.
"""
return json.dumps(data)
def create_xlsx_file(data):
def create_xlsx_file(data: List[List[str]]) -> bytes:
"""
Create an XLSX file from the provided data.
"""
workbook = Workbook()
sheet = workbook.active
@@ -58,7 +79,10 @@ def create_xlsx_file(data):
return xlsx_buffer.getvalue()
def create_zip_file(files):
def create_zip_file(files: List[tuple[str, str | bytes]]) -> io.BytesIO:
"""
Create a ZIP file from the provided files.
"""
zip_buffer = io.BytesIO()
with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zipf:
for filename, file_content in files:
@@ -68,7 +92,13 @@ def create_zip_file(files):
return zip_buffer
def upload_to_s3(zip_file, workspace_id, token_id, slug):
# TODO: Change the upload_to_s3 function to use the new storage method with entry in file asset table
def upload_to_s3(
zip_file: io.BytesIO, workspace_id: UUID, token_id: str, slug: str
) -> None:
"""
Upload a ZIP file to S3 and generate a presigned URL.
"""
file_name = (
f"{workspace_id}/export-{slug}-{token_id[:6]}-{str(timezone.now().date())}.zip"
)
@@ -150,75 +180,85 @@ def upload_to_s3(zip_file, workspace_id, token_id, slug):
exporter_instance.save(update_fields=["status", "url", "key"])
def generate_table_row(issue):
def generate_table_row(issue: dict) -> List[str]:
"""
Generate a table row from an issue dictionary.
"""
return [
f"""{issue["project__identifier"]}-{issue["sequence_id"]}""",
issue["project__name"],
f"""{issue["project_identifier"]}-{issue["sequence_id"]}""",
issue["project_name"],
issue["name"],
issue["description_stripped"],
issue["state__name"],
issue["description"],
issue["state_name"],
dateConverter(issue["start_date"]),
dateConverter(issue["target_date"]),
issue["priority"],
(
f"{issue['created_by__first_name']} {issue['created_by__last_name']}"
if issue["created_by__first_name"] and issue["created_by__last_name"]
else ""
),
(
f"{issue['assignees__first_name']} {issue['assignees__last_name']}"
if issue["assignees__first_name"] and issue["assignees__last_name"]
else ""
),
issue["labels__name"] if issue["labels__name"] else "",
issue["issue_cycle__cycle__name"],
dateConverter(issue["issue_cycle__cycle__start_date"]),
dateConverter(issue["issue_cycle__cycle__end_date"]),
issue["issue_module__module__name"],
dateConverter(issue["issue_module__module__start_date"]),
dateConverter(issue["issue_module__module__target_date"]),
issue["created_by"],
", ".join(issue["labels"]) if issue["labels"] else "",
issue["cycle_name"],
issue["cycle_start_date"],
issue["cycle_end_date"],
", ".join(issue.get("module_name", "")) if issue.get("module_name") else "",
dateTimeConverter(issue["created_at"]),
dateTimeConverter(issue["updated_at"]),
dateTimeConverter(issue["completed_at"]),
dateTimeConverter(issue["archived_at"]),
(
", ".join(
[
f"{comment['comment']} ({comment['created_at']} by {comment['created_by']})"
for comment in issue["comments"]
]
)
if issue["comments"]
else ""
),
issue["estimate"] if issue["estimate"] else "",
", ".join(issue["link"]) if issue["link"] else "",
", ".join(issue["assignees"]) if issue["assignees"] else "",
issue["subscribers_count"] if issue["subscribers_count"] else "",
issue["attachment_count"] if issue["attachment_count"] else "",
", ".join(issue["attachment_links"]) if issue["attachment_links"] else "",
]
def generate_json_row(issue):
def generate_json_row(issue: dict) -> dict:
"""
Generate a JSON row from an issue dictionary.
"""
return {
"ID": f"""{issue["project__identifier"]}-{issue["sequence_id"]}""",
"Project": issue["project__name"],
"ID": f"""{issue["project_identifier"]}-{issue["sequence_id"]}""",
"Project": issue["project_name"],
"Name": issue["name"],
"Description": issue["description_stripped"],
"State": issue["state__name"],
"Description": issue["description"],
"State": issue["state_name"],
"Start Date": dateConverter(issue["start_date"]),
"Target Date": dateConverter(issue["target_date"]),
"Priority": issue["priority"],
"Created By": (
f"{issue['created_by__first_name']} {issue['created_by__last_name']}"
if issue["created_by__first_name"] and issue["created_by__last_name"]
else ""
),
"Assignee": (
f"{issue['assignees__first_name']} {issue['assignees__last_name']}"
if issue["assignees__first_name"] and issue["assignees__last_name"]
else ""
),
"Labels": issue["labels__name"] if issue["labels__name"] else "",
"Cycle Name": issue["issue_cycle__cycle__name"],
"Cycle Start Date": dateConverter(issue["issue_cycle__cycle__start_date"]),
"Cycle End Date": dateConverter(issue["issue_cycle__cycle__end_date"]),
"Module Name": issue["issue_module__module__name"],
"Module Start Date": dateConverter(issue["issue_module__module__start_date"]),
"Module Target Date": dateConverter(issue["issue_module__module__target_date"]),
"Created By": (f"{issue['created_by']}" if issue["created_by"] else ""),
"Assignee": issue["assignees"],
"Labels": issue["labels"],
"Cycle Name": issue["cycle_name"],
"Cycle Start Date": issue["cycle_start_date"],
"Cycle End Date": issue["cycle_end_date"],
"Module Name": issue["module_name"],
"Created At": dateTimeConverter(issue["created_at"]),
"Updated At": dateTimeConverter(issue["updated_at"]),
"Completed At": dateTimeConverter(issue["completed_at"]),
"Archived At": dateTimeConverter(issue["archived_at"]),
"Comments": issue["comments"],
"Estimate": issue["estimate"],
"Link": issue["link"],
"Subscribers Count": issue["subscribers_count"],
"Attachment Count": issue["attachment_count"],
"Attachment Links": issue["attachment_links"],
}
def update_json_row(rows, row):
def update_json_row(rows: List[dict], row: dict) -> None:
"""
Update the json row with the new assignee and label.
"""
matched_index = next(
(
index
@@ -247,7 +287,10 @@ def update_json_row(rows, row):
rows.append(row)
def update_table_row(rows, row):
def update_table_row(rows: List[List[str]], row: List[str]) -> None:
"""
Update the table row with the new assignee and label.
"""
matched_index = next(
(index for index, existing_row in enumerate(rows) if existing_row[0] == row[0]),
None,
@@ -269,7 +312,12 @@ def update_table_row(rows, row):
rows.append(row)
def generate_csv(header, project_id, issues, files):
def generate_csv(
header: List[str],
project_id: str,
issues: List[dict],
files: List[tuple[str, str | bytes]],
) -> None:
"""
Generate CSV export for all the passed issues.
"""
@@ -281,7 +329,15 @@ def generate_csv(header, project_id, issues, files):
files.append((f"{project_id}.csv", csv_file))
def generate_json(header, project_id, issues, files):
def generate_json(
header: List[str],
project_id: str,
issues: List[dict],
files: List[tuple[str, str | bytes]],
) -> None:
"""
Generate JSON export for all the passed issues.
"""
rows = []
for issue in issues:
row = generate_json_row(issue)
@@ -290,68 +346,169 @@ def generate_json(header, project_id, issues, files):
files.append((f"{project_id}.json", json_file))
def generate_xlsx(header, project_id, issues, files):
def generate_xlsx(
header: List[str],
project_id: str,
issues: List[dict],
files: List[tuple[str, str | bytes]],
) -> None:
"""
Generate XLSX export for all the passed issues.
"""
rows = [header]
for issue in issues:
row = generate_table_row(issue)
update_table_row(rows, row)
xlsx_file = create_xlsx_file(rows)
files.append((f"{project_id}.xlsx", xlsx_file))
def get_created_by(obj: Issue | IssueComment) -> str:
"""
Get the created by user for the given object.
"""
if obj.created_by:
return f"{obj.created_by.first_name} {obj.created_by.last_name}"
return ""
@shared_task
def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, slug):
def issue_export_task(
provider: str,
workspace_id: UUID,
project_ids: List[str],
token_id: str,
multiple: bool,
slug: str,
):
"""
Export issues from the workspace.
provider (str): The export format: csv | json | xlsx.
token_id (str): The export object token id.
multiple (bool): Whether to export the issues to multiple files per project.
"""
try:
exporter_instance = ExporterHistory.objects.get(token=token_id)
exporter_instance.status = "processing"
exporter_instance.save(update_fields=["status"])
# Base query to get the issues
workspace_issues = (
(
Issue.objects.filter(
workspace__id=workspace_id,
project_id__in=project_ids,
project__project_projectmember__member=exporter_instance.initiated_by_id,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
)
.select_related("project", "workspace", "state", "parent", "created_by")
.prefetch_related(
"assignees", "labels", "issue_cycle__cycle", "issue_module__module"
)
.values(
"id",
"project__identifier",
"project__name",
"project__id",
"sequence_id",
"name",
"description_stripped",
"priority",
"start_date",
"target_date",
"state__name",
"created_at",
"updated_at",
"completed_at",
"archived_at",
"issue_cycle__cycle__name",
"issue_cycle__cycle__start_date",
"issue_cycle__cycle__end_date",
"issue_module__module__name",
"issue_module__module__start_date",
"issue_module__module__target_date",
"created_by__first_name",
"created_by__last_name",
"assignees__first_name",
"assignees__last_name",
"labels__name",
)
Issue.objects.filter(
workspace__id=workspace_id,
project_id__in=project_ids,
project__project_projectmember__member=exporter_instance.initiated_by_id,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
)
.select_related(
"project",
"workspace",
"state",
"parent",
"created_by",
"estimate_point",
)
.prefetch_related(
"labels",
"issue_cycle__cycle",
"issue_module__module",
"issue_comments",
"assignees",
Prefetch(
"assignees",
queryset=User.objects.only("first_name", "last_name").distinct(),
to_attr="assignee_details",
),
Prefetch(
"labels",
queryset=Label.objects.only("name").distinct(),
to_attr="label_details",
),
"issue_subscribers",
"issue_link",
)
.order_by("project__identifier", "sequence_id")
.distinct()
)
# CSV header
# Get the attachments for the issues
file_assets = FileAsset.objects.filter(
issue_id__in=workspace_issues.values_list("id", flat=True),
entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
).annotate(work_item_id=F("issue_id"), asset_id=F("id"))
# Create a dictionary to store the attachments for the issues
attachment_dict = defaultdict(list)
for asset in file_assets:
attachment_dict[asset.work_item_id].append(asset.asset_id)
# Create a list to store the issues data
issues_data = []
# Iterate over the issues
for issue in workspace_issues:
attachments = attachment_dict.get(issue.id, [])
issue_data = {
"id": issue.id,
"project_identifier": issue.project.identifier,
"project_name": issue.project.name,
"project_id": issue.project.id,
"sequence_id": issue.sequence_id,
"name": issue.name,
"description": issue.description_stripped,
"priority": issue.priority,
"start_date": issue.start_date,
"target_date": issue.target_date,
"state_name": issue.state.name if issue.state else None,
"created_at": issue.created_at,
"updated_at": issue.updated_at,
"completed_at": issue.completed_at,
"archived_at": issue.archived_at,
"module_name": [
module.module.name for module in issue.issue_module.all()
],
"created_by": get_created_by(issue),
"labels": [label.name for label in issue.label_details],
"comments": [
{
"comment": comment.comment_stripped,
"created_at": dateConverter(comment.created_at),
"created_by": get_created_by(comment),
}
for comment in issue.issue_comments.all()
],
"estimate": issue.estimate_point.value
if issue.estimate_point and issue.estimate_point.value
else "",
"link": [link.url for link in issue.issue_link.all()],
"assignees": [
f"{assignee.first_name} {assignee.last_name}"
for assignee in issue.assignee_details
],
"subscribers_count": issue.issue_subscribers.count(),
"attachment_count": len(attachments),
"attachment_links": [
f"/api/assets/v2/workspaces/{issue.workspace.slug}/projects/{issue.project_id}/issues/{issue.id}/attachments/{asset}/"
for asset in attachments
],
}
# Get Cycles data for the issue
cycle = issue.issue_cycle.last()
if cycle:
# Update cycle data
issue_data["cycle_name"] = cycle.cycle.name
issue_data["cycle_start_date"] = dateConverter(cycle.cycle.start_date)
issue_data["cycle_end_date"] = dateConverter(cycle.cycle.end_date)
else:
issue_data["cycle_name"] = ""
issue_data["cycle_start_date"] = ""
issue_data["cycle_end_date"] = ""
issues_data.append(issue_data)
# CSV header
header = [
"ID",
"Project",
@@ -362,20 +519,25 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, slug):
"Target Date",
"Priority",
"Created By",
"Assignee",
"Labels",
"Cycle Name",
"Cycle Start Date",
"Cycle End Date",
"Module Name",
"Module Start Date",
"Module Target Date",
"Created At",
"Updated At",
"Completed At",
"Archived At",
"Comments",
"Estimate",
"Link",
"Assignees",
"Subscribers Count",
"Attachment Count",
"Attachment Links",
]
# Map the provider to the function
EXPORTER_MAPPER = {
"csv": generate_csv,
"json": generate_json,
@@ -384,8 +546,13 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, slug):
files = []
if multiple:
project_dict = defaultdict(list)
for issue in issues_data:
project_dict[str(issue["project_id"])].append(issue)
for project_id in project_ids:
issues = workspace_issues.filter(project__id=project_id)
issues = project_dict.get(str(project_id), [])
exporter = EXPORTER_MAPPER.get(provider)
if exporter is not None:
exporter(header, project_id, issues, files)
@@ -393,7 +560,7 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, slug):
else:
exporter = EXPORTER_MAPPER.get(provider)
if exporter is not None:
exporter(header, workspace_id, workspace_issues, files)
exporter(header, workspace_id, issues_data, files)
zip_buffer = create_zip_file(files)
upload_to_s3(zip_buffer, workspace_id, token_id, slug)
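The rewrite replaces the old `.values()` query, whose joins produced one row per assignee/label combination and forced `update_table_row` to merge duplicates, with prefetches plus one dictionary per issue. Attachments are grouped up front with a `defaultdict`, turning N per-issue queries into one lookup each. The grouping idiom, with plain tuples standing in for the `FileAsset` queryset:

    from collections import defaultdict

    file_assets = [("issue-1", "a1"), ("issue-1", "a2"), ("issue-2", "a3")]

    attachment_dict = defaultdict(list)
    for work_item_id, asset_id in file_assets:
        attachment_dict[work_item_id].append(asset_id)

    print(attachment_dict["issue-1"])  # ['a1', 'a2']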

View File

@@ -63,7 +63,7 @@ def forgot_password(first_name, email, uidb64, token, current_site):
)
msg.attach_alternative(html_content, "text/html")
msg.send()
logging.getLogger("plane").info("Email sent successfully")
logging.getLogger("plane.worker").info("Email sent successfully")
return
except Exception as e:
log_exception(e)

View File

@@ -53,7 +53,7 @@ def magic_link(email, key, token):
)
msg.attach_alternative(html_content, "text/html")
msg.send()
logging.getLogger("plane").info("Email sent successfully.")
logging.getLogger("plane.worker").info("Email sent successfully.")
return
except Exception as e:
log_exception(e)

View File

@@ -80,7 +80,7 @@ def project_add_user_email(current_site, project_member_id, invitor_id):
# Send the email
msg.send()
# Log the success
logging.getLogger("plane").info("Email sent successfully.")
logging.getLogger("plane.worker").info("Email sent successfully.")
return
except Exception as e:
log_exception(e)

View File

@@ -76,7 +76,7 @@ def project_invitation(email, project_id, token, current_site, invitor):
msg.attach_alternative(html_content, "text/html")
msg.send()
logging.getLogger("plane").info("Email sent successfully.")
logging.getLogger("plane.worker").info("Email sent successfully.")
return
except (Project.DoesNotExist, ProjectMemberInvite.DoesNotExist):
return

View File

@@ -58,7 +58,7 @@ def user_activation_email(current_site, user_id):
msg.attach_alternative(html_content, "text/html")
msg.send()
logging.getLogger("plane").info("Email sent successfully.")
logging.getLogger("plane.worker").info("Email sent successfully.")
return
except Exception as e:
log_exception(e)

View File

@@ -60,7 +60,7 @@ def user_deactivation_email(current_site, user_id):
# Attach HTML content
msg.attach_alternative(html_content, "text/html")
msg.send()
logging.getLogger("plane").info("Email sent successfully.")
logging.getLogger("plane.worker").info("Email sent successfully.")
return
except Exception as e:
log_exception(e)

View File

@@ -0,0 +1,177 @@
# Python imports
import logging
# Third party imports
from celery import shared_task
import requests
from bs4 import BeautifulSoup
from urllib.parse import urlparse, urljoin
import base64
import ipaddress
from typing import Any, Dict, Optional
from plane.db.models import IssueLink
from plane.utils.exception_logger import log_exception
logger = logging.getLogger("plane.worker")
DEFAULT_FAVICON = "PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIyNCIgaGVpZ2h0PSIyNCIgdmlld0JveD0iMCAwIDI0IDI0IiBmaWxsPSJub25lIiBzdHJva2U9ImN1cnJlbnRDb2xvciIgc3Ryb2tlLXdpZHRoPSIyIiBzdHJva2UtbGluZWNhcD0icm91bmQiIHN0cm9rZS1saW5lam9pbj0icm91bmQiIGNsYXNzPSJsdWNpZGUgbHVjaWRlLWxpbmstaWNvbiBsdWNpZGUtbGluayI+PHBhdGggZD0iTTEwIDEzYTUgNSAwIDAgMCA3LjU0LjU0bDMtM2E1IDUgMCAwIDAtNy4wNy03LjA3bC0xLjcyIDEuNzEiLz48cGF0aCBkPSJNMTQgMTFhNSA1IDAgMCAwLTcuNTQtLjU0bC0zIDNhNSA1IDAgMCAwIDcuMDcgNy4wN2wxLjcxLTEuNzEiLz48L3N2Zz4=" # noqa: E501
def crawl_work_item_link_title_and_favicon(url: str) -> Dict[str, Any]:
"""
Crawls a URL to extract the title and favicon.
Args:
url (str): The URL to crawl
Returns:
Dict[str, Any]: dictionary with the title, base64-encoded favicon, the crawled URL, and the favicon URL
"""
try:
# Prevent access to private IP ranges
parsed = urlparse(url)
try:
ip = ipaddress.ip_address(parsed.hostname)
except ValueError:
# Not an IP address, continue with domain validation
ip = None
if ip is not None and (ip.is_private or ip.is_loopback or ip.is_reserved):
# Raised outside the except block so the guard is not swallowed by it
raise ValueError("Access to private/internal networks is not allowed")
# Set up headers to mimic a real browser
headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36" # noqa: E501
}
soup = None
title = None
try:
response = requests.get(url, headers=headers, timeout=1)
soup = BeautifulSoup(response.content, "html.parser")
title_tag = soup.find("title")
title = title_tag.get_text().strip() if title_tag else None
except requests.RequestException as e:
logger.warning(f"Failed to fetch HTML for title: {str(e)}")
# Fetch and encode favicon
favicon_base64 = fetch_and_encode_favicon(headers, soup, url)
# Prepare result
result = {
"title": title,
"favicon": favicon_base64["favicon_base64"],
"url": url,
"favicon_url": favicon_base64["favicon_url"],
}
return result
except Exception as e:
log_exception(e)
return {
"error": f"Unexpected error: {str(e)}",
"title": None,
"favicon": None,
"url": url,
}
def find_favicon_url(soup: Optional[BeautifulSoup], base_url: str) -> Optional[str]:
"""
Find the favicon URL from HTML soup.
Args:
soup: BeautifulSoup object
base_url: Base URL for resolving relative paths
Returns:
str: Absolute URL to favicon or None
"""
if soup is not None:
# Look for various favicon link tags
favicon_selectors = [
'link[rel="icon"]',
'link[rel="shortcut icon"]',
'link[rel="apple-touch-icon"]',
'link[rel="apple-touch-icon-precomposed"]',
]
for selector in favicon_selectors:
favicon_tag = soup.select_one(selector)
if favicon_tag and favicon_tag.get("href"):
return urljoin(base_url, favicon_tag["href"])
# Fallback to /favicon.ico
parsed_url = urlparse(base_url)
fallback_url = f"{parsed_url.scheme}://{parsed_url.netloc}/favicon.ico"
# Check if fallback exists
try:
response = requests.head(fallback_url, timeout=2)
if response.status_code == 200:
return fallback_url
except requests.RequestException as e:
log_exception(e)
return None
return None
def fetch_and_encode_favicon(
headers: Dict[str, str], soup: Optional[BeautifulSoup], url: str
) -> Dict[str, Optional[str]]:
"""
Fetch favicon and encode it as base64.
Args:
headers: Request headers to reuse for the favicon fetch
soup: Parsed page (BeautifulSoup) used to locate favicon links, or None
url: URL of the crawled page, used to resolve relative favicon paths
Returns:
Dict[str, Optional[str]]: the favicon URL and a base64 data URI (falls back to a default icon)
"""
try:
favicon_url = find_favicon_url(soup, url)
if favicon_url is None:
return {
"favicon_url": None,
"favicon_base64": f"data:image/svg+xml;base64,{DEFAULT_FAVICON}",
}
response = requests.get(favicon_url, headers=headers, timeout=1)
# Get content type
content_type = response.headers.get("content-type", "image/x-icon")
# Convert to base64
favicon_base64 = base64.b64encode(response.content).decode("utf-8")
# Return as data URI
return {
"favicon_url": favicon_url,
"favicon_base64": f"data:{content_type};base64,{favicon_base64}",
}
except Exception as e:
logger.warning(f"Failed to fetch favicon: {e}")
return {
"favicon_url": None,
"favicon_base64": f"data:image/svg+xml;base64,{DEFAULT_FAVICON}",
}
@shared_task
def crawl_work_item_link_title(id: str, url: str) -> None:
meta_data = crawl_work_item_link_title_and_favicon(url)
issue_link = IssueLink.objects.get(id=id)
issue_link.metadata = meta_data
issue_link.save()
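Two properties of this crawler are worth noting: the private-network guard only fires for literal IP hostnames (a domain that resolves to a private address passes), and both fetches use short timeouts so a slow site degrades to the default favicon instead of stalling the worker. A hedged sketch of extending the guard to resolved addresses, should that ever be needed:

    import ipaddress
    import socket
    from urllib.parse import urlparse

    def resolves_to_private_network(url: str) -> bool:
        # Sketch only: resolve the hostname and test every returned address.
        # DNS-based checks bring their own pitfalls (rebinding, latency) and
        # would need care before production use.
        hostname = urlparse(url).hostname
        if hostname is None:
            return True
        try:
            infos = socket.getaddrinfo(hostname, None)
        except socket.gaierror:
            return True
        for info in infos:
            ip = ipaddress.ip_address(info[4][0])
            if ip.is_private or ip.is_loopback or ip.is_reserved:
                return True
        return False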

View File

@@ -78,7 +78,7 @@ def workspace_invitation(email, workspace_id, token, current_site, inviter):
)
msg.attach_alternative(html_content, "text/html")
msg.send()
logging.getLogger("plane").info("Email sent successfully")
logging.getLogger("plane.worker").info("Email sent successfully")
return
except (Workspace.DoesNotExist, WorkspaceMemberInvite.DoesNotExist):
return

View File

@@ -5,7 +5,9 @@ from plane.db.models import Workspace
class Command(BaseCommand):
help = "Updates the slug of a soft-deleted workspace by appending the epoch timestamp"
help = (
"Updates the slug of a soft-deleted workspace by appending the epoch timestamp"
)
def add_arguments(self, parser):
parser.add_argument(
@@ -75,4 +77,4 @@ class Command(BaseCommand):
self.style.ERROR(
f"Error updating workspace '{workspace.name}': {str(e)}"
)
)
)

View File

@@ -0,0 +1,23 @@
# Generated by Django 4.2.14 on 2025-05-09 11:31
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('db', '0094_auto_20250425_0902'),
]
operations = [
migrations.AddField(
model_name='page',
name='external_id',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='page',
name='external_source',
field=models.CharField(blank=True, max_length=255, null=True),
),
]

View File

@@ -0,0 +1,23 @@
# Generated by Django 4.2.20 on 2025-05-21 13:43
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("db", "0095_page_external_id_page_external_source"),
]
operations = [
migrations.AddField(
model_name="user",
name="is_email_valid",
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name="user",
name="masked_at",
field=models.DateTimeField(null=True),
),
]

View File

@@ -0,0 +1,23 @@
# Generated by Django 4.2.21 on 2025-06-06 12:33
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('db', '0096_user_is_email_valid_user_masked_at'),
]
operations = [
migrations.AddField(
model_name='project',
name='external_id',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='project',
name='external_source',
field=models.CharField(blank=True, max_length=255, null=True),
),
]

View File

@@ -82,4 +82,4 @@ from .label import Label
from .device import Device, DeviceSession
from .sticky import Sticky
from .sticky import Sticky

View File

@@ -18,22 +18,28 @@ class BaseModel(AuditModel):
class Meta:
abstract = True
def save(self, *args, **kwargs):
user = get_current_user()
def save(self, *args, created_by_id=None, disable_auto_set_user=False, **kwargs):
if not disable_auto_set_user:
# Check if created_by_id is provided
if created_by_id:
self.created_by_id = created_by_id
else:
user = get_current_user()
if user is None or user.is_anonymous:
self.created_by = None
self.updated_by = None
super(BaseModel, self).save(*args, **kwargs)
else:
# Check if the model is being created or updated
if self._state.adding:
# If created only set created_by value: set updated_by to None
self.created_by = user
self.updated_by = None
# If updated only set updated_by value don't touch created_by
self.updated_by = user
super(BaseModel, self).save(*args, **kwargs)
if user is None or user.is_anonymous:
self.created_by = None
self.updated_by = None
else:
# Check if the model is being created or updated
if self._state.adding:
# If creating, set created_by and leave updated_by as None
self.created_by = user
self.updated_by = None
else:
# If updating, set updated_by only
self.updated_by = user
super(BaseModel, self).save(*args, **kwargs)
def __str__(self):
return str(self.id)
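The reworked `save()` gives callers two escape hatches from the request-local `get_current_user()` lookup, which is empty inside background jobs. Illustrative calls (`issue` and `actor_id` are stand-ins):

    # Inside a Celery task there is no request-local user, so stamp explicitly:
    issue.save(created_by_id=actor_id)

    # Or keep whatever created_by/updated_by are already set on the instance:
    issue.save(disable_auto_set_user=True)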

View File

@@ -17,6 +17,11 @@ def get_view_props():
class Page(BaseModel):
PRIVATE_ACCESS = 1
PUBLIC_ACCESS = 0
ACCESS_CHOICES = ((PRIVATE_ACCESS, "Private"), (PUBLIC_ACCESS, "Public"))
workspace = models.ForeignKey(
"db.Workspace", on_delete=models.CASCADE, related_name="pages"
)
@@ -53,6 +58,9 @@ class Page(BaseModel):
moved_to_page = models.UUIDField(null=True, blank=True)
moved_to_project = models.UUIDField(null=True, blank=True)
external_id = models.CharField(max_length=255, null=True, blank=True)
external_source = models.CharField(max_length=255, null=True, blank=True)
class Meta:
verbose_name = "Page"
verbose_name_plural = "Pages"
@@ -91,9 +99,7 @@ class PageLog(BaseModel):
transaction = models.UUIDField(default=uuid.uuid4)
page = models.ForeignKey(Page, related_name="page_log", on_delete=models.CASCADE)
entity_identifier = models.UUIDField(null=True)
entity_name = models.CharField(
max_length=30, verbose_name="Transaction Type"
)
entity_name = models.CharField(max_length=30, verbose_name="Transaction Type")
workspace = models.ForeignKey(
"db.Workspace", on_delete=models.CASCADE, related_name="workspace_page_log"
)

View File

@@ -122,6 +122,9 @@ class Project(BaseModel):
# timezone
TIMEZONE_CHOICES = tuple(zip(pytz.all_timezones, pytz.all_timezones))
timezone = models.CharField(max_length=255, default="UTC", choices=TIMEZONE_CHOICES)
# external_id for imports
external_source = models.CharField(max_length=255, null=True, blank=True)
external_id = models.CharField(max_length=255, blank=True, null=True)
@property
def cover_image_url(self):

View File

@@ -106,6 +106,12 @@ class User(AbstractBaseUser, PermissionsMixin):
max_length=255, default="UTC", choices=USER_TIMEZONE_CHOICES
)
# email validation
is_email_valid = models.BooleanField(default=False)
# masking
masked_at = models.DateTimeField(null=True)
USERNAME_FIELD = "email"
REQUIRED_FIELDS = ["username"]

View File

@@ -153,12 +153,8 @@ class Workspace(BaseModel):
return None
def delete(
self,
using: Optional[str] = None,
soft: bool = True,
*args: Any,
**kwargs: Any
):
self, using: Optional[str] = None, soft: bool = True, *args: Any, **kwargs: Any
):
"""
Override the delete method to append epoch timestamp to the slug when soft deleting.
@@ -172,7 +168,7 @@ class Workspace(BaseModel):
result = super().delete(using=using, soft=soft, *args, **kwargs)
# If it's a soft delete and the model still exists (not hard deleted)
if soft and hasattr(self, 'deleted_at') and self.deleted_at:
if soft and hasattr(self, "deleted_at") and self.deleted_at:
# Use the deleted_at timestamp to update the slug
deletion_timestamp: int = int(self.deleted_at.timestamp())
self.slug = f"{self.slug}__{deletion_timestamp}"

View File

@@ -57,7 +57,7 @@ class InstanceEndpoint(BaseAPIView):
POSTHOG_API_KEY,
POSTHOG_HOST,
UNSPLASH_ACCESS_KEY,
OPENAI_API_KEY,
LLM_API_KEY,
IS_INTERCOM_ENABLED,
INTERCOM_APP_ID,
) = get_configuration_value(
@@ -112,8 +112,8 @@ class InstanceEndpoint(BaseAPIView):
"default": os.environ.get("UNSPLASH_ACCESS_KEY", ""),
},
{
"key": "OPENAI_API_KEY",
"default": os.environ.get("OPENAI_API_KEY", ""),
"key": "LLM_API_KEY",
"default": os.environ.get("LLM_API_KEY", ""),
},
# Intercom settings
{
@@ -151,7 +151,7 @@ class InstanceEndpoint(BaseAPIView):
data["has_unsplash_configured"] = bool(UNSPLASH_ACCESS_KEY)
# Open AI settings
data["has_openai_configured"] = bool(OPENAI_API_KEY)
data["has_llm_configured"] = bool(LLM_API_KEY)
# File size settings
data["file_size_limit"] = float(os.environ.get("FILE_SIZE_LIMIT", 5242880))

View File

@@ -157,7 +157,7 @@ class Command(BaseCommand):
},
# Deprecated, use LLM_MODEL
{
"key": "GPT_ENGINE",
"key": "GPT_ENGINE",
"value": os.environ.get("GPT_ENGINE", "gpt-3.5-turbo"),
"category": "SMTP",
"is_encrypted": False,

View File

@@ -83,6 +83,32 @@ class APITokenLogMiddleware:
self.process_request(request, response, request_body)
return response
def _safe_decode_body(self, content):
"""
Safely decodes request/response body content, handling binary data.
Returns None if content is None, or a string representation of the content.
"""
# If the content is None, return None
if content is None:
return None
# If the content is an empty bytes object, return None
if content == b"":
return None
# Check if content is binary by looking for common binary file signatures
if (
content.startswith(b"\x89PNG")
or content.startswith(b"\xff\xd8\xff")
or content.startswith(b"%PDF")
):
return "[Binary Content]"
try:
return content.decode("utf-8")
except UnicodeDecodeError:
return "[Could not decode content]"
def process_request(self, request, response, request_body):
api_key_header = "X-Api-Key"
api_key = request.headers.get(api_key_header)
@@ -95,9 +121,13 @@ class APITokenLogMiddleware:
method=request.method,
query_params=request.META.get("QUERY_STRING", ""),
headers=str(request.headers),
body=(request_body.decode("utf-8") if request_body else None),
body=(
self._safe_decode_body(request_body) if request_body else None
),
response_body=(
response.content.decode("utf-8") if response.content else None
self._safe_decode_body(response.content)
if response.content
else None
),
response_code=response.status_code,
ip_address=get_client_ip(request=request),

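`_safe_decode_body` checks magic bytes (`\x89PNG` for PNG, `\xff\xd8\xff` for JPEG, `%PDF` for PDF) before attempting UTF-8, so binary uploads are logged as placeholders instead of raising. Expected behavior, sketched with `middleware` standing for an `APITokenLogMiddleware` instance:

    middleware._safe_decode_body(b"\x89PNG\r\n\x1a\n...")  # "[Binary Content]"
    middleware._safe_decode_body(b'{"name": "demo"}')      # '{"name": "demo"}'
    middleware._safe_decode_body(b"\xff\xfe\x00")          # "[Could not decode content]"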
View File

@@ -3,7 +3,7 @@
# Python imports
import os
from urllib.parse import urlparse
from urllib.parse import urljoin
# Third party imports
import dj_database_url
@@ -13,6 +13,10 @@ from django.core.management.utils import get_random_secret_key
from corsheaders.defaults import default_headers
# Module imports
from plane.utils.url import is_valid_url
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Secret Key
@@ -310,15 +314,35 @@ CSRF_TRUSTED_ORIGINS = cors_allowed_origins
CSRF_COOKIE_DOMAIN = os.environ.get("COOKIE_DOMAIN", None)
CSRF_FAILURE_VIEW = "plane.authentication.views.common.csrf_failure"
# Base URLs
###### Base URLs ######
# Admin Base URL
ADMIN_BASE_URL = os.environ.get("ADMIN_BASE_URL", None)
ADMIN_BASE_PATH = os.environ.get("ADMIN_BASE_PATH", None)
if ADMIN_BASE_URL and not is_valid_url(ADMIN_BASE_URL):
ADMIN_BASE_URL = None
ADMIN_BASE_PATH = os.environ.get("ADMIN_BASE_PATH", "/god-mode/")
# Space Base URL
SPACE_BASE_URL = os.environ.get("SPACE_BASE_URL", None)
SPACE_BASE_PATH = os.environ.get("SPACE_BASE_PATH", None)
APP_BASE_URL = os.environ.get("APP_BASE_URL")
APP_BASE_PATH = os.environ.get("APP_BASE_PATH", None)
LIVE_BASE_URL = os.environ.get("LIVE_BASE_URL")
LIVE_BASE_PATH = os.environ.get("LIVE_BASE_PATH")
if SPACE_BASE_URL and not is_valid_url(SPACE_BASE_URL):
SPACE_BASE_URL = None
SPACE_BASE_PATH = os.environ.get("SPACE_BASE_PATH", "/spaces/")
# App Base URL
APP_BASE_URL = os.environ.get("APP_BASE_URL", None)
if APP_BASE_URL and not is_valid_url(APP_BASE_URL):
APP_BASE_URL = None
APP_BASE_PATH = os.environ.get("APP_BASE_PATH", "/")
# Live Base URL
LIVE_BASE_URL = os.environ.get("LIVE_BASE_URL", None)
if LIVE_BASE_URL and not is_valid_url(LIVE_BASE_URL):
LIVE_BASE_URL = None
LIVE_BASE_PATH = os.environ.get("LIVE_BASE_PATH", "/live/")
LIVE_URL = urljoin(LIVE_BASE_URL, LIVE_BASE_PATH) if LIVE_BASE_URL else None
# WEB URL
WEB_URL = os.environ.get("WEB_URL")
HARD_DELETE_AFTER_DAYS = int(os.environ.get("HARD_DELETE_AFTER_DAYS", 60))
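One subtlety in `LIVE_URL = urljoin(LIVE_BASE_URL, LIVE_BASE_PATH)`: because `LIVE_BASE_PATH` defaults to the absolute path `/live/`, `urljoin` replaces any path already on the base URL rather than appending to it:

    from urllib.parse import urljoin

    urljoin("http://localhost:3000", "/live/")      # 'http://localhost:3000/live/'
    # An absolute second argument discards the base's existing path:
    urljoin("http://localhost:3000/api", "/live/")  # 'http://localhost:3000/live/'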

View File

@@ -32,7 +32,6 @@ class S3Storage(S3Boto3Storage):
) or os.environ.get("MINIO_ENDPOINT_URL")
if os.environ.get("USE_MINIO") == "1":
# Determine protocol based on environment variable
if os.environ.get("MINIO_ENDPOINT_SSL") == "1":
endpoint_protocol = "https"

View File

@@ -135,7 +135,7 @@ def issue_on_results(
default=None,
output_field=JSONField(),
),
filter=Q(votes__isnull=False,votes__deleted_at__isnull=True),
filter=Q(votes__isnull=False, votes__deleted_at__isnull=True),
distinct=True,
),
reaction_items=ArrayAgg(
@@ -169,7 +169,9 @@ def issue_on_results(
default=None,
output_field=JSONField(),
),
filter=Q(issue_reactions__isnull=False, issue_reactions__deleted_at__isnull=True),
filter=Q(
issue_reactions__isnull=False, issue_reactions__deleted_at__isnull=True
),
distinct=True,
),
).values(*required_fields, "vote_items", "reaction_items")

View File

@@ -21,6 +21,7 @@ from plane.app.serializers import (
)
from plane.utils.issue_filters import issue_filters
from plane.bgtasks.issue_activities_task import issue_activity
from plane.db.models.intake import SourceType
class IntakeIssuePublicViewSet(BaseViewSet):
@@ -156,7 +157,7 @@ class IntakeIssuePublicViewSet(BaseViewSet):
intake_id=intake_id,
project_id=project_deploy_board.project_id,
issue=issue,
source=request.data.get("source", "IN-APP"),
source=SourceType.IN_APP,
)
serializer = IssueStateIntakeSerializer(issue)

View File

@@ -179,7 +179,7 @@ class ProjectIssuesPublicEndpoint(BaseAPIView):
Q(issue_intake__status=1)
| Q(issue_intake__status=-1)
| Q(issue_intake__status=2)
| Q(issue_intake__status=True),
| Q(issue_intake__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
@@ -205,7 +205,7 @@ class ProjectIssuesPublicEndpoint(BaseAPIView):
Q(issue_intake__status=1)
| Q(issue_intake__status=-1)
| Q(issue_intake__status=2)
| Q(issue_intake__status=True),
| Q(issue_intake__isnull=True),
archived_at__isnull=True,
is_draft=False,
),

View File

@@ -14,9 +14,7 @@ class ProjectMetaDataEndpoint(BaseAPIView):
def get(self, request, anchor):
try:
deploy_board = DeployBoard.objects.get(
anchor=anchor, entity_name="project"
)
deploy_board = DeployBoard.objects.get(anchor=anchor, entity_name="project")
except DeployBoard.DoesNotExist:
return Response(
{"error": "Project is not published"}, status=status.HTTP_404_NOT_FOUND

Some files were not shown because too many files have changed in this diff.