Compare commits

...

74 Commits

Author SHA1 Message Date
Satish Gandham
c54f171c5a Sync issues in sequence. 2024-12-16 16:08:29 +05:30
Satish Gandham
de0dbc0be5 Create indexes in sequence 2024-12-16 16:08:05 +05:30
Manish Gupta
47a76f48b4 fix: separated docker compose environment variables (#5575)
* Separated environment variables for specific app containers.

* updated env

* cleanup

* Separated environment variables for specific app containers.

* updated env

* cleanup

---------

Co-authored-by: Akshat Jain <akshatjain9782@gmail.com>
2024-12-16 13:23:33 +05:30
Manish Gupta
a0f03d07f3 chore: Check github releases for upgrades (#6162)
* modified action and install.sh for selfhost

* updated selfhost readme and install.sh

* fixes

* changes suggested by code-rabbit

* chore: updated powered by (#6160)

* improvement: update fetch map during workspace-level module fetch to reduce redundant API calls (#6159)

* fix: remove unwanted states fetching logic to avoid multiple API calls. (#6158)

* chore remove unnecessary CTA (#6161)

* fix: build branch workflow upload artifacts

* fixes

* changes suggested by code-rabbit

* modified action and install.sh for selfhost

* updated selfhost readme and install.sh

* fix: build branch workflow upload artifacts

* fixes

* changes suggested by code-rabbit

---------

Co-authored-by: guru_sainath <gurusainath007@gmail.com>
Co-authored-by: Prateek Shourya <prateekshourya29@gmail.com>
Co-authored-by: rahulramesha <71900764+rahulramesha@users.noreply.github.com>
Co-authored-by: sriram veeraghanta <veeraghanta.sriram@gmail.com>
2024-12-16 13:22:23 +05:30
Nikhil
74b2ec03ff feat: add language support (#6205) 2024-12-15 11:04:03 +05:30
guru_sainath
5908998127 [WEB-2854] chore: trigger issue_description_version task on issue create and update (#6202)
* chore: issue description version task trigger from issue create and update

* chore: add default value in prop
2024-12-13 22:30:29 +05:30
guru_sainath
df6a80e7ae chore: add sync jobs for issue_version and issue_description_version tables (#6199)
* chore: added fields in issue_version and profile tables and created a new sticky table

* chore: removed point in issue version

* chore: add imports in init

* chore: added sync jobs for issue_version and issue_description_version

* chore: removed logs

* chore: updated logging

---------

Co-authored-by: sainath <sainath@sainaths-MacBook-Pro.local>
2024-12-13 17:48:55 +05:30
guru_sainath
6ff258ceca chore: Add fields to issue_version and profile tables, and create new sticky table (#6198)
* chore: added fields in issue_version and profile tables and created a new sticky table

* chore: removed point in issue version

* chore: add imports in init

---------

Co-authored-by: sainath <sainath@sainaths-MacBook-Pro.local>
2024-12-13 17:30:25 +05:30
Saurabhkmr98
a8140a5f08 chore: Add logger package for node server side apps (#6188)
* chore: Add logger as a package

* chore: Add logger package for node server side apps

* remove plane logger import in web

* resolve pr reviews and add client logger with readme update

* fix: transformation and added middleware for logging requests

* chore: update readme

* fix: env configurable max file size

---------

Co-authored-by: sriram veeraghanta <veeraghanta.sriram@gmail.com>
2024-12-13 14:32:56 +05:30
Prateek Shourya
9234f21f26 [WEB-2848] improvement: enhanced components modularity (#6196)
* improvement: enhanced components modularity

* fix: lint errors resolved
2024-12-13 14:26:26 +05:30
Bavisetti Narayan
ab11e83535 [WEB-2843] chore: updated the cycle end date logic (#6194)
* chore: updated the cycle end date logic

* chore: changed the key for timezone
2024-12-13 13:34:07 +05:30
Akshita Goyal
b4112358ac [WEB-2688] chore: added icons and splitted issue header (#6195)
* chore: added icons and splitted issue header

* fix: added ee filler component

* fix: component name fixed

* fix: removed dupes

* fix: casing
2024-12-13 13:31:13 +05:30
Aaryan Khandelwal
77239ebcd4 fix: GitHub casing across the platform (#6193) 2024-12-13 02:22:46 +05:30
Prateek Shourya
54f828cbfa refactor: enhance components modularity and introduce new UI components (#6192)
* feat: add navigation dropdown component

* chore: enhance title/description loader and component modularity

* chore: issue store filter update

* chore: added few icons to ui package

* chore: improvements for tabs component

* chore: enhance sidebar modularity

* chore: update issue and router store to add support for additional issue layouts

* chore: enhanced cycle components modularity

* feat: added project grouping header for cycles list

* chore: enhanced project dropdown component by adding multiple selection functionality

* chore: enhanced rich text editor modularity by taking members ids as props for mentions

* chore: added functionality to filter disabled layouts in issue-layout dropdown

* chore: added support to pass project ids as props in project card list

* feat: multi select project modal

* chore: separate out project component for reusability

* chore: command palette store improvements

* fix: build errors
2024-12-12 21:40:57 +05:30
Bavisetti Narayan
9ad8b43408 chore: handled the cycle date time using project timezone (#6187)
* chore: handled the cycle date time using project timezone

* chore: reverted the frontend commit
2024-12-12 14:11:12 +05:30
Prateek Shourya
38e8a5c807 fix: command palette build (#6186) 2024-12-11 18:19:09 +05:30
Prateek Shourya
a9bd2e243a refactor: enhance command palette modularity (#6139)
* refactor: enhance command palette modularity

* chore: minor updates to command palette store
2024-12-11 18:02:58 +05:30
Vamsi Krishna
ca0d50b229 fix: no activity while moving inbox issues (#6185) 2024-12-11 17:57:27 +05:30
Vamsi Krishna
7fca7fd86c [WEB-2774] fix:favorites reorder (#6179)
* fix:favorites reorder

* chore: added error handling

Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>

---------

Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
2024-12-11 16:29:39 +05:30
Prateek Shourya
0ac68f2731 improvement: refactored issue grouping logic to access MobX store directly (#6134)
* improvement: refactored issue grouping logic to access MobX store directly

* chore: minor updates
2024-12-11 15:14:15 +05:30
rahulramesha
5a9ae66680 chore: Remove shouldIgnoreDependencies flags while dragging in timeline view (#6150)
* remove shouldEnable dependency flags for timeline view

* chore: error handling

---------

Co-authored-by: Prateek Shourya <prateekshourya29@gmail.com>
2024-12-11 13:43:48 +05:30
Vamsi Krishna
134644fdf1 [WEB-2382]chore:notification files restructuring (#6181)
* chore: adjusted  increment/decrement  for unread count

* chore: improved param handling for unread notification count function

* chore:file restructuring

* fix:notification types

* chore:file restructuring

* chore: modified notification types

* chore: modified types for notification

* chore:removed redundant checks for id
2024-12-11 13:41:19 +05:30
sriram veeraghanta
d0f3987aeb fix: instance changelog url updated 2024-12-10 21:03:44 +05:30
sriram veeraghanta
f06b1b8c4a fix: updated package version 2024-12-10 21:02:29 +05:30
sriram veeraghanta
6e56ea4c60 fix: updated changelog url in apiserver 2024-12-10 20:28:51 +05:30
Anmol Singh Bhatia
216a69f991 chore: workspace draft and inbox issue local db mutation (#6180) 2024-12-10 19:12:24 +05:30
Vihar Kurama
205395e079 fix: changed checkboxes to toggles on notifications settings page (#6175) 2024-12-10 01:02:34 +05:30
Bavisetti Narayan
ff8bbed6f9 chore: changed the soft deletion logic (#6171) 2024-12-09 20:29:30 +05:30
Vamsi Krishna
d04619477b [WEB-2382]chore: notifications code improvement (#6172)
* chore: adjusted  increment/decrement  for unread count

* chore: improved param handling for unread notification count function
2024-12-09 18:06:56 +05:30
sriram veeraghanta
547c138084 fix: ui package module resolution 2024-12-09 15:56:20 +05:30
Anmol Singh Bhatia
5c907db0e2 [WEB-2818] chore: project navigation items code refactor (#6170)
* chore: project navigation items code refactor

* fix: build error

* chore: code refactor

* chore: code refactor
2024-12-09 14:37:04 +05:30
Aaryan Khandelwal
a85e592ada fix: creating a new sub-issue from workspace level (#6169) 2024-12-09 12:15:10 +05:30
sriram veeraghanta
b21d190ce0 fix: added github pull request template 2024-12-09 02:55:09 +05:30
sriram veeraghanta
cba41e0755 fix: upgrading the express version 2024-12-09 02:35:48 +05:30
sriram veeraghanta
02308eeb15 fix: django version upgrade 2024-12-09 02:28:06 +05:30
guru_sainath
9ee41ece98 fix: email check validation to handle case in-sensitive email (#6168) 2024-12-07 17:55:50 +05:30
Vamsi Krishna
666ddf73b6 [WEB-2382]chore:notification snooze modal (#6164)
* modified notification store

* notification snooze types fix

* handled promise

* modified notifications layout

* increased pagination count for notifications
2024-12-06 16:27:45 +05:30
Satish Gandham
4499a5fa25 Sync issues and workspace data when the issue properties like labels/modules/cycles etc are deleted from the project (#6165) 2024-12-06 16:27:07 +05:30
sriram veeraghanta
727dd4002e fix: updated lint command in packages 2024-12-06 15:00:11 +05:30
sriram veeraghanta
4b5a2bc4e5 chore: lint related changes and packaging fixes (#6163)
* fix: lint related changes and packaging fixes

* adding color validations
2024-12-06 14:56:49 +05:30
sriram veeraghanta
b1c340b199 fix: build branch workflow upload artifacts 2024-12-05 16:51:20 +05:30
rahulramesha
a612a17d28 chore remove unnecessary CTA (#6161) 2024-12-05 16:37:55 +05:30
Prateek Shourya
d55ee6d5b8 fix: remove unwanted states fetching logic to avoid multiple API calls. (#6158) 2024-12-05 15:26:35 +05:30
Prateek Shourya
aa1e192a50 improvement: update fetch map during workspace-level module fetch to reduce redundant API calls (#6159) 2024-12-05 15:26:15 +05:30
guru_sainath
6cd8af1092 chore: updated powered by (#6160) 2024-12-05 15:12:37 +05:30
rahulramesha
66652a5d71 refactor project states to make way for new features (#6156)
sriram veeraghanta
3bccda0c86 chore: formatting and typo fixes 2024-12-04 19:40:37 +05:30
sriram veeraghanta
fb3295f5f4 fix: sites opengraph title and description added 2024-12-04 17:58:23 +05:30
sriram veeraghanta
fa3aa362a9 fix: lint errors 2024-12-04 17:22:41 +05:30
Bavisetti Narayan
b73ea37798 chore: improve the cascading logic (#6152) 2024-12-04 16:15:57 +05:30
Vamsi Krishna
d537e560e3 [WEB-2802]fix: dropdown visibility issue in safari (#6151)
* filters drop down fix safari

* added comments for translation

* fixed drop down visibility issue
2024-12-04 15:27:34 +05:30
guru_sainath
1b92a18ef8 chore: updated the ssr rendering on sites (#6145)
* fix: refactoring

* fix: site ssr implementation

* chore: fixed auto reload on file change in sites

* chore: updated constant imports and globalised powerBy component

* chore: resolved lint and updated the env

---------

Co-authored-by: sriram veeraghanta <veeraghanta.sriram@gmail.com>
2024-12-04 14:24:53 +05:30
rahulramesha
31b6d52417 fix root issue store to have updated url params at all times (#6147) 2024-12-04 13:57:33 +05:30
Vamsi Krishna
a153de34d6 fixed priority icons shape (#6144)
Aaryan Khandelwal
64a44f4fce style: add custom class to editor paragraph and heading blocks (#6143) 2024-12-04 13:43:52 +05:30
guru_sainath
bb8a156bdd fix: removed changelog endpoint (#6146) 2024-12-04 13:42:15 +05:30
Akshita Goyal
f02a2b04a5 fix: export btn overlap issue (#6149) 2024-12-04 13:41:48 +05:30
Bavisetti Narayan
b6ab853c57 chore: filter out the removed cycle from issue detail (#6138) 2024-12-03 16:48:14 +05:30
Aaryan Khandelwal
fe43300aa7 fix: pages empty state authorization (#6141) 2024-12-03 14:53:02 +05:30
Prateek Shourya
849d9891d2 chore: community edition product updates link (#6132)
* chore: community edition product updates link

* fix: iframe embed for changelog

---------

Co-authored-by: sriram veeraghanta <veeraghanta.sriram@gmail.com>
2024-12-03 13:28:28 +05:30
Vamsi Krishna
2768f560ad [WEB-2802]fix:filters drop down fix safari (#6133)
* filters drop down fix safari

* added comments for translation
2024-12-03 12:51:39 +05:30
Anmol Singh Bhatia
fe5999ceff fix: intake issue permission (#6136) 2024-12-02 19:49:09 +05:30
rahulramesha
da0071256f fix half block dragging (#6135) 2024-12-02 19:30:58 +05:30
M. Palanikannan
3c6006d04a [PE-31] feat: Add lock unlock archive restore realtime sync (#5629)
* fix: add lock unlock archive restore realtime sync

* fix: show only after editor loads

* fix: added strong types

* fix: live events fixed

* fix: remove unused vars and logs

* fix: converted objects to enum

* fix: error handling and removing the events in read only mode

* fix: added check to only update if the image aspect ratio is not present already

* fix: imports

* fix: props order

* revert: no need of these changes anymore

* fix: updated type names

* fix: order of things

* fix: fixed types and renamed variables

* fix: better typing for the real time updates

* fix: trying multiplexing our socket connection

* fix: multiplexing socket connection in read only editor as well

* fix: remove single socket logic

* fix: fixing the cleanup deps for the provider and localprovider

* fix: add a better data structure for managing events

* chore: refactored realtime events into hooks

* feat: fetch page meta while focusing tabs

* fix: cycling through items on slash command item in down arrow

* fix: better naming convention for realtime events

* fix: simplified localprovider initialization and cleaning

* fix: types from ui

* fix: abstracted away from exposing the provider directly

* fix: coderabbit suggestions

* regression: pass user in dependency array

* fix: removed page action api calls by the other users the document is synced with

* chore: removed unused imports
2024-12-02 14:26:36 +05:30
Aaryan Khandelwal
8c04aa6f51 dev: revamp pages authorization (#6094) 2024-12-02 13:59:01 +05:30
Aaryan Khandelwal
9f14167ef5 refactor: editor code splitting (#6102)
* fix: merge conflicts resolved from preview

* fix: space app build errors

* fix: product updates modal

* fix: build errors

* fix: lite text read only editor

* refactor: additional options push logic
2024-12-02 13:51:27 +05:30
Aaryan Khandelwal
11bfbe560a fix: checked colored todo list item (#6113) 2024-12-02 13:47:50 +05:30
Aaryan Khandelwal
fc52936024 fix: escape markdown content for images (#6096) 2024-12-02 13:36:12 +05:30
Vamsi Krishna
5150c661ab reduced the components moved (#6110) 2024-12-02 13:35:40 +05:30
Vamsi Krishna
63bc01f385 [WEB-2774]fix:reordering favorites and favorite folders (#6119)
* fixed re order for favorites

* fixed lint errors

* added reorder

* fixed reorder inside folder

* fixed lint issues

* memoized reorder

* removed unnecessary comments

* separated duplicate logic to a common file

* removed code comments

* fixed favorite remove while reorder inside folder

* fixed folder remove while reorder inside folder

* fixed-reorder issue

* added last child to drop handled

* fixed orderby function

* removed unnecessary comments
2024-12-02 13:35:09 +05:30
Anmol Singh Bhatia
1953d6fe3a [WEB-2762] chore: loader code refactor (#5992)
* chore: loader code refactor

* chore: code refactor

* chore: code refactor

* chore: code refactor
2024-12-02 13:24:01 +05:30
Anmol Singh Bhatia
1b9033993d [WEB-2799] chore: global component and code refactor (#6131)
* chore: local storage helper hook added to package

* chore: tabs global component added

* chore: collapsible button improvement

* chore: linear progress indicator improvement

* chore: fill icon set added to package
2024-12-02 13:22:08 +05:30
sriram veeraghanta
75ada1bfac fix: constants package updates 2024-12-01 21:26:35 +05:30
Prateek Shourya
d0f9a4d245 chore: add redirection to plane logo in invitations page (#6125) 2024-11-29 20:20:49 +05:30
486 changed files with 8938 additions and 5044 deletions

20
.github/pull_request_template.md vendored Normal file
View File

@@ -0,0 +1,20 @@
### Description
<!-- Provide a detailed description of the changes in this PR -->
### Type of Change
<!-- Put an 'x' in the boxes that apply -->
- [ ] Bug fix (non-breaking change which fixes an issue)
- [ ] Feature (non-breaking change which adds functionality)
- [ ] Improvement (change that would cause existing functionality to not work as expected)
- [ ] Code refactoring
- [ ] Performance improvements
- [ ] Documentation update
### Screenshots and Media (if applicable)
<!-- Add screenshots to help explain your changes, ideally showcasing before and after -->
### Test Scenarios
<!-- Please describe the tests that you ran to verify your changes -->
### References
<!-- Link related issues if there are any -->

View File

@@ -314,8 +314,8 @@ jobs:
buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
attach_assets_to_build:
if: ${{ needs.branch_build_setup.outputs.build_type == 'Build' }}
name: Attach Assets to Build
if: ${{ needs.branch_build_setup.outputs.build_type == 'Release' }}
name: Attach Assets to Release
runs-on: ubuntu-20.04
needs: [branch_build_setup]
steps:

View File

@@ -44,7 +44,7 @@ const InstanceGithubAuthenticationPage = observer(() => {
loading: "Saving Configuration...",
success: {
title: "Configuration saved",
message: () => `Github authentication is now ${value ? "active" : "disabled"}.`,
message: () => `GitHub authentication is now ${value ? "active" : "disabled"}.`,
},
error: {
title: "Error",
@@ -67,8 +67,8 @@ const InstanceGithubAuthenticationPage = observer(() => {
<div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
<div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
<AuthenticationMethodCard
name="Github"
description="Allow members to login or sign up to plane with their Github accounts."
name="GitHub"
description="Allow members to login or sign up to plane with their GitHub accounts."
icon={
<Image
src={resolveGeneralTheme(resolvedTheme) === "dark" ? githubDarkModeImage : githubLightModeImage}

View File

@@ -3,7 +3,7 @@
import { FC, useEffect, useRef } from "react";
import { observer } from "mobx-react";
// plane helpers
import { useOutsideClickDetector } from "@plane/helpers";
import { useOutsideClickDetector } from "@plane/hooks";
// components
import { HelpSection, SidebarMenu, SidebarDropdown } from "@/components/admin-sidebar";
// hooks

View File

@@ -30,7 +30,7 @@ export const InstanceHeader: FC = observer(() => {
case "google":
return "Google";
case "github":
return "Github";
return "GitHub";
case "gitlab":
return "GitLab";
case "workspace":

View File

@@ -1,6 +1,6 @@
{
"name": "admin",
"version": "0.24.0",
"version": "0.24.1",
"private": true,
"scripts": {
"dev": "turbo run develop",
@@ -14,9 +14,10 @@
"dependencies": {
"@headlessui/react": "^1.7.19",
"@plane/constants": "*",
"@plane/helpers": "*",
"@plane/hooks": "*",
"@plane/types": "*",
"@plane/ui": "*",
"@plane/utils": "*",
"@sentry/nextjs": "^8.32.0",
"@tailwindcss/typography": "^0.5.9",
"@types/lodash": "^4.17.0",
@@ -26,7 +27,7 @@
"lucide-react": "^0.356.0",
"mobx": "^6.12.0",
"mobx-react": "^9.1.1",
"next": "^14.2.12",
"next": "^14.2.20",
"next-themes": "^0.2.1",
"postcss": "^8.4.38",
"react": "^18.3.1",

View File

@@ -4,7 +4,7 @@ FROM python:3.12.5-alpine AS backend
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
ENV INSTANCE_CHANGELOG_URL https://api.plane.so/api/public/anchor/8e1c2e4c7bc5493eb7731be3862f6960/pages/
ENV INSTANCE_CHANGELOG_URL https://sites.plane.so/pages/691ef037bcfe416a902e48cb55f59891/
WORKDIR /code

View File

@@ -4,7 +4,7 @@ FROM python:3.12.5-alpine AS backend
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
ENV INSTANCE_CHANGELOG_URL https://api.plane.so/api/public/anchor/8e1c2e4c7bc5493eb7731be3862f6960/pages/
ENV INSTANCE_CHANGELOG_URL https://sites.plane.so/pages/691ef037bcfe416a902e48cb55f59891/
RUN apk --no-cache add \
"bash~=5.2" \

View File

@@ -1,4 +1,4 @@
{
"name": "plane-api",
"version": "0.24.0"
"version": "0.24.1"
}

View File

@@ -4,7 +4,7 @@ from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from plane.db.models import Cycle, CycleIssue
from plane.utils.timezone_converter import convert_to_utc
class CycleSerializer(BaseSerializer):
total_issues = serializers.IntegerField(read_only=True)
@@ -24,6 +24,18 @@ class CycleSerializer(BaseSerializer):
and data.get("start_date", None) > data.get("end_date", None)
):
raise serializers.ValidationError("Start date cannot exceed end date")
if (
data.get("start_date", None) is not None
and data.get("end_date", None) is not None
):
project_id = self.initial_data.get("project_id") or self.instance.project_id
data["start_date"] = convert_to_utc(
str(data.get("start_date").date()), project_id, is_start_date=True
)
data["end_date"] = convert_to_utc(
str(data.get("end_date", None).date()), project_id
)
return data
class Meta:

View File

@@ -5,6 +5,7 @@ from rest_framework import serializers
from .base import BaseSerializer
from .issue import IssueStateSerializer
from plane.db.models import Cycle, CycleIssue, CycleUserProperties
from plane.utils.timezone_converter import convert_to_utc
class CycleWriteSerializer(BaseSerializer):
@@ -15,6 +16,17 @@ class CycleWriteSerializer(BaseSerializer):
and data.get("start_date", None) > data.get("end_date", None)
):
raise serializers.ValidationError("Start date cannot exceed end date")
if (
data.get("start_date", None) is not None
and data.get("end_date", None) is not None
):
project_id = self.initial_data.get("project_id") or self.instance.project_id
data["start_date"] = convert_to_utc(
str(data.get("start_date").date()), project_id, is_start_date=True
)
data["end_date"] = convert_to_utc(
str(data.get("end_date", None).date()), project_id
)
return data
class Meta:

View File

@@ -68,9 +68,7 @@ urlpatterns = [
# user workspace invitations
path(
"users/me/workspaces/invitations/",
UserWorkspaceInvitationsViewSet.as_view(
{"get": "list", "post": "create"}
),
UserWorkspaceInvitationsViewSet.as_view({"get": "list", "post": "create"}),
name="user-workspace-invitations",
),
path(

View File

@@ -1,5 +1,7 @@
# Python imports
import json
import pytz
# Django imports
from django.contrib.postgres.aggregates import ArrayAgg
@@ -52,6 +54,11 @@ from plane.bgtasks.recent_visited_task import recent_visited_task
# Module imports
from .. import BaseAPIView, BaseViewSet
from plane.bgtasks.webhook_task import model_activity
from plane.utils.timezone_converter import (
convert_utc_to_project_timezone,
convert_to_utc,
user_timezone_converter,
)
class CycleViewSet(BaseViewSet):
@@ -67,6 +74,19 @@ class CycleViewSet(BaseViewSet):
project_id=self.kwargs.get("project_id"),
workspace__slug=self.kwargs.get("slug"),
)
project = Project.objects.get(id=self.kwargs.get("project_id"))
# Fetch project for the specific record or pass project_id dynamically
project_timezone = project.timezone
# Convert the current time (timezone.now()) to the project's timezone
local_tz = pytz.timezone(project_timezone)
current_time_in_project_tz = timezone.now().astimezone(local_tz)
# Convert project local time back to UTC for comparison (start_date is stored in UTC)
current_time_in_utc = current_time_in_project_tz.astimezone(pytz.utc)
return self.filter_queryset(
super()
.get_queryset()
@@ -119,12 +139,15 @@ class CycleViewSet(BaseViewSet):
.annotate(
status=Case(
When(
Q(start_date__lte=timezone.now())
& Q(end_date__gte=timezone.now()),
Q(start_date__lte=current_time_in_utc)
& Q(end_date__gte=current_time_in_utc),
then=Value("CURRENT"),
),
When(start_date__gt=timezone.now(), then=Value("UPCOMING")),
When(end_date__lt=timezone.now(), then=Value("COMPLETED")),
When(
start_date__gt=current_time_in_utc,
then=Value("UPCOMING"),
),
When(end_date__lt=current_time_in_utc, then=Value("COMPLETED")),
When(
Q(start_date__isnull=True) & Q(end_date__isnull=True),
then=Value("DRAFT"),
@@ -160,10 +183,22 @@ class CycleViewSet(BaseViewSet):
# Update the order by
queryset = queryset.order_by("-is_favorite", "-created_at")
project = Project.objects.get(id=self.kwargs.get("project_id"))
# Fetch project for the specific record or pass project_id dynamically
project_timezone = project.timezone
# Convert the current time (timezone.now()) to the project's timezone
local_tz = pytz.timezone(project_timezone)
current_time_in_project_tz = timezone.now().astimezone(local_tz)
# Convert project local time back to UTC for comparison (start_date is stored in UTC)
current_time_in_utc = current_time_in_project_tz.astimezone(pytz.utc)
# Current Cycle
if cycle_view == "current":
queryset = queryset.filter(
start_date__lte=timezone.now(), end_date__gte=timezone.now()
start_date__lte=current_time_in_utc, end_date__gte=current_time_in_utc
)
data = queryset.values(
@@ -191,6 +226,8 @@ class CycleViewSet(BaseViewSet):
"version",
"created_by",
)
datetime_fields = ["start_date", "end_date"]
data = user_timezone_converter(data, datetime_fields, project_timezone)
if data:
return Response(data, status=status.HTTP_200_OK)
@@ -221,6 +258,8 @@ class CycleViewSet(BaseViewSet):
"version",
"created_by",
)
datetime_fields = ["start_date", "end_date"]
data = user_timezone_converter(data, datetime_fields, request.user.user_timezone)
return Response(data, status=status.HTTP_200_OK)
@allow_permission([ROLE.ADMIN, ROLE.MEMBER])
@@ -417,6 +456,8 @@ class CycleViewSet(BaseViewSet):
)
queryset = queryset.first()
datetime_fields = ["start_date", "end_date"]
data = user_timezone_converter(data, datetime_fields, request.user.user_timezone)
recent_visited_task.delay(
slug=slug,
@@ -492,6 +533,9 @@ class CycleDateCheckEndpoint(BaseAPIView):
status=status.HTTP_400_BAD_REQUEST,
)
start_date = convert_to_utc(str(start_date), project_id, is_start_date=True)
end_date = convert_to_utc(str(end_date), project_id)
# Check if any cycle intersects in the given interval
cycles = Cycle.objects.filter(
Q(workspace__slug=slug)

View File

@@ -15,8 +15,6 @@ from django.db.models import (
UUIDField,
Value,
Subquery,
Case,
When,
)
from django.db.models.functions import Coalesce
from django.utils import timezone
@@ -56,10 +54,11 @@ from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPaginator
from .. import BaseAPIView, BaseViewSet
from plane.utils.user_timezone_converter import user_timezone_converter
from plane.utils.timezone_converter import user_timezone_converter
from plane.bgtasks.recent_visited_task import recent_visited_task
from plane.utils.global_paginator import paginate
from plane.bgtasks.webhook_task import model_activity
from plane.bgtasks.issue_description_version_task import issue_description_version_task
class IssueListEndpoint(BaseAPIView):
@@ -430,6 +429,13 @@ class IssueViewSet(BaseViewSet):
slug=slug,
origin=request.META.get("HTTP_ORIGIN"),
)
# updated issue description version
issue_description_version_task.delay(
updated_issue=json.dumps(request.data, cls=DjangoJSONEncoder),
issue_id=str(serializer.data["id"]),
user_id=request.user.id,
is_creating=True,
)
return Response(issue, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -445,12 +451,10 @@ class IssueViewSet(BaseViewSet):
.select_related("workspace", "project", "state", "parent")
.prefetch_related("assignees", "labels", "issue_module__module")
.annotate(
cycle_id=Case(
When(
issue_cycle__cycle__deleted_at__isnull=True,
then=F("issue_cycle__cycle_id"),
),
default=None,
cycle_id=Subquery(
CycleIssue.objects.filter(issue=OuterRef("id")).values("cycle_id")[
:1
]
)
)
.annotate(
@@ -653,6 +657,12 @@ class IssueViewSet(BaseViewSet):
slug=slug,
origin=request.META.get("HTTP_ORIGIN"),
)
# updated issue description version
issue_description_version_task.delay(
updated_issue=current_instance,
issue_id=str(serializer.data.get("id", None)),
user_id=request.user.id,
)
return Response(status=status.HTTP_204_NO_CONTENT)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

View File

@@ -20,7 +20,7 @@ from plane.app.serializers import IssueSerializer
from plane.app.permissions import ProjectEntityPermission
from plane.db.models import Issue, IssueLink, FileAsset, CycleIssue
from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.user_timezone_converter import user_timezone_converter
from plane.utils.timezone_converter import user_timezone_converter
from collections import defaultdict

View File

@@ -28,7 +28,7 @@ from plane.app.permissions import ProjectEntityPermission
from plane.app.serializers import ModuleDetailSerializer
from plane.db.models import Issue, Module, ModuleLink, UserFavorite, Project
from plane.utils.analytics_plot import burndown_plot
from plane.utils.user_timezone_converter import user_timezone_converter
from plane.utils.timezone_converter import user_timezone_converter
# Module imports

View File

@@ -56,7 +56,7 @@ from plane.db.models import (
Project,
)
from plane.utils.analytics_plot import burndown_plot
from plane.utils.user_timezone_converter import user_timezone_converter
from plane.utils.timezone_converter import user_timezone_converter
from plane.bgtasks.webhook_task import model_activity
from .. import BaseAPIView, BaseViewSet
from plane.bgtasks.recent_visited_task import recent_visited_task

View File

@@ -114,7 +114,7 @@ class PageViewSet(BaseViewSet):
.distinct()
)
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
@allow_permission([ROLE.ADMIN, ROLE.MEMBER])
def create(self, request, slug, project_id):
serializer = PageSerializer(
data=request.data,
@@ -134,7 +134,7 @@ class PageViewSet(BaseViewSet):
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
@allow_permission([ROLE.ADMIN, ROLE.MEMBER])
def partial_update(self, request, slug, project_id, pk):
try:
page = Page.objects.get(
@@ -234,7 +234,7 @@ class PageViewSet(BaseViewSet):
)
return Response(data, status=status.HTTP_200_OK)
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
@allow_permission([ROLE.ADMIN], model=Page, creator=True)
def lock(self, request, slug, project_id, pk):
page = Page.objects.filter(
pk=pk, workspace__slug=slug, projects__id=project_id
@@ -244,7 +244,7 @@ class PageViewSet(BaseViewSet):
page.save()
return Response(status=status.HTTP_204_NO_CONTENT)
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
@allow_permission([ROLE.ADMIN], model=Page, creator=True)
def unlock(self, request, slug, project_id, pk):
page = Page.objects.filter(
pk=pk, workspace__slug=slug, projects__id=project_id
@@ -255,7 +255,7 @@ class PageViewSet(BaseViewSet):
return Response(status=status.HTTP_204_NO_CONTENT)
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
@allow_permission([ROLE.ADMIN], model=Page, creator=True)
def access(self, request, slug, project_id, pk):
access = request.data.get("access", 0)
page = Page.objects.filter(
@@ -296,7 +296,7 @@ class PageViewSet(BaseViewSet):
pages = PageSerializer(queryset, many=True).data
return Response(pages, status=status.HTTP_200_OK)
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
@allow_permission([ROLE.ADMIN], model=Page, creator=True)
def archive(self, request, slug, project_id, pk):
page = Page.objects.get(pk=pk, workspace__slug=slug, projects__id=project_id)
@@ -323,7 +323,7 @@ class PageViewSet(BaseViewSet):
return Response({"archived_at": str(datetime.now())}, status=status.HTTP_200_OK)
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
@allow_permission([ROLE.ADMIN], model=Page, creator=True)
def unarchive(self, request, slug, project_id, pk):
page = Page.objects.get(pk=pk, workspace__slug=slug, projects__id=project_id)
@@ -348,7 +348,7 @@ class PageViewSet(BaseViewSet):
return Response(status=status.HTTP_204_NO_CONTENT)
@allow_permission([ROLE.ADMIN], creator=True, model=Page)
@allow_permission([ROLE.ADMIN], model=Page, creator=True)
def destroy(self, request, slug, project_id, pk):
page = Page.objects.get(pk=pk, workspace__slug=slug, projects__id=project_id)

View File

@@ -16,12 +16,7 @@ from plane.app.permissions import (
WorkspaceUserPermission,
)
from plane.db.models import (
Project,
ProjectMember,
IssueUserProperty,
WorkspaceMember,
)
from plane.db.models import Project, ProjectMember, IssueUserProperty, WorkspaceMember
from plane.bgtasks.project_add_user_email_task import project_add_user_email
from plane.utils.host import base_host
from plane.app.permissions.base import allow_permission, ROLE
@@ -83,10 +78,7 @@ class ProjectMemberViewSet(BaseViewSet):
workspace_member_role = WorkspaceMember.objects.get(
workspace__slug=slug, member=member, is_active=True
).role
if workspace_member_role in [20] and member_roles.get(member) in [
5,
15,
]:
if workspace_member_role in [20] and member_roles.get(member) in [5, 15]:
return Response(
{
"error": "You cannot add a user with role lower than the workspace role"
@@ -94,10 +86,7 @@ class ProjectMemberViewSet(BaseViewSet):
status=status.HTTP_400_BAD_REQUEST,
)
if workspace_member_role in [5] and member_roles.get(member) in [
15,
20,
]:
if workspace_member_role in [5] and member_roles.get(member) in [15, 20]:
return Response(
{
"error": "You cannot add a user with role higher than the workspace role"
@@ -135,8 +124,7 @@ class ProjectMemberViewSet(BaseViewSet):
sort_order = [
project_member.get("sort_order")
for project_member in project_members
if str(project_member.get("member_id"))
== str(member.get("member_id"))
if str(project_member.get("member_id")) == str(member.get("member_id"))
]
# Create a new project member
bulk_project_members.append(
@@ -145,9 +133,7 @@ class ProjectMemberViewSet(BaseViewSet):
role=member.get("role", 5),
project_id=project_id,
workspace_id=project.workspace_id,
sort_order=(
sort_order[0] - 10000 if len(sort_order) else 65535
),
sort_order=(sort_order[0] - 10000 if len(sort_order) else 65535),
)
)
# Create a new issue property
@@ -238,9 +224,7 @@ class ProjectMemberViewSet(BaseViewSet):
> requested_project_member.role
):
return Response(
{
"error": "You cannot update a role that is higher than your own role"
},
{"error": "You cannot update a role that is higher than your own role"},
status=status.HTTP_400_BAD_REQUEST,
)
@@ -280,9 +264,7 @@ class ProjectMemberViewSet(BaseViewSet):
# User cannot deactivate higher role
if requesting_project_member.role < project_member.role:
return Response(
{
"error": "You cannot remove a user having role higher than you"
},
{"error": "You cannot remove a user having role higher than you"},
status=status.HTTP_400_BAD_REQUEST,
)
@@ -303,10 +285,7 @@ class ProjectMemberViewSet(BaseViewSet):
if (
project_member.role == 20
and not ProjectMember.objects.filter(
workspace__slug=slug,
project_id=project_id,
role=20,
is_active=True,
workspace__slug=slug, project_id=project_id, role=20, is_active=True
).count()
> 1
):
@@ -344,7 +323,6 @@ class UserProjectRolesEndpoint(BaseAPIView):
).values("project_id", "role")
project_members = {
str(member["project_id"]): member["role"]
for member in project_members
str(member["project_id"]): member["role"] for member in project_members
}
return Response(project_members, status=status.HTTP_200_OK)

View File

@@ -41,6 +41,7 @@ from django.views.decorators.vary import vary_on_cookie
from plane.utils.constants import RESTRICTED_WORKSPACE_SLUGS
from plane.license.utils.instance_value import get_configuration_value
class WorkSpaceViewSet(BaseViewSet):
model = Workspace
serializer_class = WorkSpaceSerializer
@@ -81,12 +82,12 @@ class WorkSpaceViewSet(BaseViewSet):
def create(self, request):
try:
DISABLE_WORKSPACE_CREATION, = get_configuration_value(
(DISABLE_WORKSPACE_CREATION,) = get_configuration_value(
[
{
"key": "DISABLE_WORKSPACE_CREATION",
"default": os.environ.get("DISABLE_WORKSPACE_CREATION", "0"),
},
}
]
)

View File

@@ -10,7 +10,7 @@ from plane.app.views.base import BaseAPIView
from plane.db.models import Cycle
from plane.app.permissions import WorkspaceViewerPermission
from plane.app.serializers.cycle import CycleSerializer
from plane.utils.timezone_converter import user_timezone_converter
class WorkspaceCyclesEndpoint(BaseAPIView):
permission_classes = [WorkspaceViewerPermission]

View File

@@ -1,22 +1,12 @@
# Django imports
from django.db.models import (
Count,
Q,
OuterRef,
Subquery,
IntegerField,
)
from django.db.models import Count, Q, OuterRef, Subquery, IntegerField
from django.db.models.functions import Coalesce
# Third party modules
from rest_framework import status
from rest_framework.response import Response
from plane.app.permissions import (
WorkspaceEntityPermission,
allow_permission,
ROLE,
)
from plane.app.permissions import WorkspaceEntityPermission, allow_permission, ROLE
# Module imports
from plane.app.serializers import (
@@ -26,12 +16,7 @@ from plane.app.serializers import (
WorkSpaceMemberSerializer,
)
from plane.app.views.base import BaseAPIView
from plane.db.models import (
Project,
ProjectMember,
WorkspaceMember,
DraftIssue,
)
from plane.db.models import Project, ProjectMember, WorkspaceMember, DraftIssue
from plane.utils.cache import invalidate_cache
from .. import BaseViewSet
@@ -119,9 +104,7 @@ class WorkSpaceMemberViewSet(BaseViewSet):
if requesting_workspace_member.role < workspace_member.role:
return Response(
{
"error": "You cannot remove a user having role higher than you"
},
{"error": "You cannot remove a user having role higher than you"},
status=status.HTTP_400_BAD_REQUEST,
)
@@ -148,9 +131,7 @@ class WorkSpaceMemberViewSet(BaseViewSet):
# Deactivate the users from the projects where the user is part of
_ = ProjectMember.objects.filter(
workspace__slug=slug,
member_id=workspace_member.member_id,
is_active=True,
workspace__slug=slug, member_id=workspace_member.member_id, is_active=True
).update(is_active=False)
workspace_member.is_active = False
@@ -164,9 +145,7 @@ class WorkSpaceMemberViewSet(BaseViewSet):
multiple=True,
)
@invalidate_cache(path="/api/users/me/settings/")
@invalidate_cache(
path="api/users/me/workspaces/", user=False, multiple=True
)
@invalidate_cache(path="api/users/me/workspaces/", user=False, multiple=True)
@allow_permission(
allowed_roles=[ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST], level="WORKSPACE"
)
@@ -213,9 +192,7 @@ class WorkSpaceMemberViewSet(BaseViewSet):
# # Deactivate the users from the projects where the user is part of
_ = ProjectMember.objects.filter(
workspace__slug=slug,
member_id=workspace_member.member_id,
is_active=True,
workspace__slug=slug, member_id=workspace_member.member_id, is_active=True
).update(is_active=False)
# # Deactivate the user
@@ -279,9 +256,7 @@ class WorkspaceProjectMemberEndpoint(BaseAPIView):
project_members = ProjectMember.objects.filter(
workspace__slug=slug, project_id__in=project_ids, is_active=True
).select_related("project", "member", "workspace")
project_members = ProjectMemberRoleSerializer(
project_members, many=True
).data
project_members = ProjectMemberRoleSerializer(project_members, many=True).data
project_members_dict = dict()

View File

@@ -60,6 +60,9 @@ class EmailCheckEndpoint(APIView):
)
return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)
# Lower the email
email = str(email).lower().strip()
# Validate email
try:
validate_email(email)

View File

@@ -60,6 +60,7 @@ class EmailCheckSpaceEndpoint(APIView):
)
return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)
email = str(email).lower().strip()
# Validate email
try:
validate_email(email)

View File

@@ -3,7 +3,8 @@ from django.utils import timezone
from django.apps import apps
from django.conf import settings
from django.db import models
from django.core.exceptions import ObjectDoesNotExist
from django.db.models.fields.related import OneToOneRel
# Third party imports
from celery import shared_task
@@ -11,31 +12,98 @@ from celery import shared_task
@shared_task
def soft_delete_related_objects(app_label, model_name, instance_pk, using=None):
"""
Soft delete related objects for a given model instance
"""
# Get the model class using app registry
model_class = apps.get_model(app_label, model_name)
instance = model_class.all_objects.get(pk=instance_pk)
related_fields = instance._meta.get_fields()
for field in related_fields:
if field.one_to_many or field.one_to_one:
try:
# Check if the field has CASCADE on delete
if (
not hasattr(field.remote_field, "on_delete")
or field.remote_field.on_delete == models.CASCADE
):
if field.one_to_many:
related_objects = getattr(instance, field.name).all()
elif field.one_to_one:
related_object = getattr(instance, field.name)
related_objects = (
[related_object] if related_object is not None else []
)
for obj in related_objects:
if obj:
obj.deleted_at = timezone.now()
obj.save(using=using)
except ObjectDoesNotExist:
pass
# Get the instance using all_objects to ensure we can get even if it's already soft deleted
try:
instance = model_class.all_objects.get(pk=instance_pk)
except model_class.DoesNotExist:
return
# Get all related fields that are reverse relationships
all_related = [
f
for f in instance._meta.get_fields()
if (f.one_to_many or f.one_to_one) and f.auto_created and not f.concrete
]
# Handle each related field
for relation in all_related:
related_name = relation.get_accessor_name()
# Skip if the relation doesn't exist
if not hasattr(instance, related_name):
continue
# Get the on_delete behavior name
on_delete_name = (
relation.on_delete.__name__
if hasattr(relation.on_delete, "__name__")
else ""
)
if on_delete_name == "DO_NOTHING":
continue
elif on_delete_name == "SET_NULL":
# Handle SET_NULL relationships
if isinstance(relation, OneToOneRel):
# For OneToOne relationships
related_obj = getattr(instance, related_name, None)
if related_obj and isinstance(related_obj, models.Model):
setattr(related_obj, relation.remote_field.name, None)
related_obj.save(update_fields=[relation.remote_field.name])
else:
# For other relationships
related_queryset = getattr(instance, related_name).all()
related_queryset.update(**{relation.remote_field.name: None})
else:
# Handle CASCADE and other delete behaviors
try:
if relation.one_to_one:
# Handle OneToOne relationships
related_obj = getattr(instance, related_name, None)
if related_obj:
if hasattr(related_obj, "deleted_at"):
if not related_obj.deleted_at:
related_obj.deleted_at = timezone.now()
related_obj.save()
# Recursively handle related objects
soft_delete_related_objects(
related_obj._meta.app_label,
related_obj._meta.model_name,
related_obj.pk,
using,
)
else:
# Handle other relationships
related_queryset = getattr(instance, related_name).all()
for related_obj in related_queryset:
if hasattr(related_obj, "deleted_at"):
if not related_obj.deleted_at:
related_obj.deleted_at = timezone.now()
related_obj.save()
# Recursively handle related objects
soft_delete_related_objects(
related_obj._meta.app_label,
related_obj._meta.model_name,
related_obj.pk,
using,
)
except Exception as e:
# Log the error or handle as needed
print(f"Error handling relation {related_name}: {str(e)}")
continue
# Finally, soft delete the instance itself if it hasn't been deleted yet
if hasattr(instance, "deleted_at") and not instance.deleted_at:
instance.deleted_at = timezone.now()
instance.save()
# @shared_task

View File

@@ -162,8 +162,7 @@ def generate_table_row(issue):
issue["priority"],
(
f"{issue['created_by__first_name']} {issue['created_by__last_name']}"
if issue["created_by__first_name"]
and issue["created_by__last_name"]
if issue["created_by__first_name"] and issue["created_by__last_name"]
else ""
),
(
@@ -197,8 +196,7 @@ def generate_json_row(issue):
"Priority": issue["priority"],
"Created By": (
f"{issue['created_by__first_name']} {issue['created_by__last_name']}"
if issue["created_by__first_name"]
and issue["created_by__last_name"]
if issue["created_by__first_name"] and issue["created_by__last_name"]
else ""
),
"Assignee": (
@@ -208,17 +206,11 @@ def generate_json_row(issue):
),
"Labels": issue["labels__name"] if issue["labels__name"] else "",
"Cycle Name": issue["issue_cycle__cycle__name"],
"Cycle Start Date": dateConverter(
issue["issue_cycle__cycle__start_date"]
),
"Cycle Start Date": dateConverter(issue["issue_cycle__cycle__start_date"]),
"Cycle End Date": dateConverter(issue["issue_cycle__cycle__end_date"]),
"Module Name": issue["issue_module__module__name"],
"Module Start Date": dateConverter(
issue["issue_module__module__start_date"]
),
"Module Target Date": dateConverter(
issue["issue_module__module__target_date"]
),
"Module Start Date": dateConverter(issue["issue_module__module__start_date"]),
"Module Target Date": dateConverter(issue["issue_module__module__target_date"]),
"Created At": dateTimeConverter(issue["created_at"]),
"Updated At": dateTimeConverter(issue["updated_at"]),
"Completed At": dateTimeConverter(issue["completed_at"]),

View File

@@ -0,0 +1,125 @@
# Python imports
from typing import Optional
import logging
# Django imports
from django.utils import timezone
from django.db import transaction
# Third party imports
from celery import shared_task
# Module imports
from plane.db.models import Issue, IssueDescriptionVersion, ProjectMember
from plane.utils.exception_logger import log_exception
def get_owner_id(issue: Issue) -> Optional[int]:
    """Resolve who should own a description version of this issue.

    Preference order: the last updater, then the creator; if neither is
    recorded, fall back to any admin (role 20) of the issue's project.
    Returns ``None`` when no candidate can be found.
    """
    for candidate_id in (issue.updated_by_id, issue.created_by_id):
        if candidate_id:
            return candidate_id
    # Neither updater nor creator is set: pick an arbitrary project admin.
    admin = ProjectMember.objects.filter(
        project_id=issue.project_id,
        role=20,  # Admin role
    ).first()
    return admin.member_id if admin else None
@shared_task
def sync_issue_description_version(batch_size=5000, offset=0, countdown=300):
    """Task to create IssueDescriptionVersion records for existing Issues in batches

    Walks the Issue table ordered by ``created_at`` in windows of
    ``batch_size`` rows starting at ``offset``. For each issue with a
    resolvable owner, one snapshot row is bulk-created; the task then
    re-queues itself ``countdown`` seconds later for the next window.
    """
    try:
        with transaction.atomic():
            base_query = Issue.objects
            total_issues_count = base_query.count()
            if total_issues_count == 0:
                return
            # Calculate batch range (clamped to the table size)
            end_offset = min(offset + batch_size, total_issues_count)
            # Fetch issues with related data; only() restricts the loaded
            # columns to the fields copied into the version rows below.
            issues_batch = (
                base_query.order_by("created_at")
                .select_related("workspace", "project")
                .only(
                    "id",
                    "workspace_id",
                    "project_id",
                    "created_by_id",
                    "updated_by_id",
                    "description_binary",
                    "description_html",
                    "description_stripped",
                    "description",
                )[offset:end_offset]
            )
            if not issues_batch:
                return
            version_objects = []
            for issue in issues_batch:
                # Validate required fields
                if not issue.workspace_id or not issue.project_id:
                    logging.warning(
                        f"Skipping {issue.id} - missing workspace_id or project_id"
                    )
                    continue
                # Determine owned_by_id (updater, creator, or a project admin)
                owned_by_id = get_owner_id(issue)
                if owned_by_id is None:
                    logging.warning(f"Skipping issue {issue.id} - missing owned_by")
                    continue
                # Create version object mirroring the issue's description fields
                version_objects.append(
                    IssueDescriptionVersion(
                        workspace_id=issue.workspace_id,
                        project_id=issue.project_id,
                        created_by_id=issue.created_by_id,
                        updated_by_id=issue.updated_by_id,
                        owned_by_id=owned_by_id,
                        last_saved_at=timezone.now(),
                        issue_id=issue.id,
                        description_binary=issue.description_binary,
                        description_html=issue.description_html,
                        description_stripped=issue.description_stripped,
                        description_json=issue.description,
                    )
                )
            # Bulk create version objects
            if version_objects:
                IssueDescriptionVersion.objects.bulk_create(version_objects)
            # Schedule next batch if needed; countdown is forwarded both as a
            # task kwarg (for the next reschedule) and as the queue delay.
            if end_offset < total_issues_count:
                sync_issue_description_version.apply_async(
                    kwargs={
                        "batch_size": batch_size,
                        "offset": end_offset,
                        "countdown": countdown,
                    },
                    countdown=countdown,
                )
            return
    except Exception as e:
        log_exception(e)
        return
@shared_task
def schedule_issue_description_version(batch_size=5000, countdown=300):
    """Kick off the batched IssueDescriptionVersion backfill.

    Both parameters may arrive as strings (the management command feeds
    them from ``input()``), so both are coerced to int before dispatch —
    the original cast only ``batch_size``, leaving a string ``countdown``
    to reach Celery's scheduling math.
    """
    sync_issue_description_version.delay(
        batch_size=int(batch_size), countdown=int(countdown)
    )

View File

@@ -0,0 +1,84 @@
from celery import shared_task
from django.db import transaction
from django.utils import timezone
from typing import Optional, Dict
import json
from plane.db.models import Issue, IssueDescriptionVersion
from plane.utils.exception_logger import log_exception
def should_update_existing_version(
    version: IssueDescriptionVersion, user_id: str, max_time_difference: int = 600
) -> bool:
    """Decide whether ``version`` can absorb a new edit instead of a new row.

    True when the latest version belongs to the same user and was saved
    within ``max_time_difference`` seconds (default 10 minutes).

    Fix: the function is annotated ``-> bool`` but previously fell through
    a bare ``return`` (i.e. ``None``) when ``version`` was missing; it now
    returns ``False`` explicitly.
    """
    if not version:
        return False
    time_difference = (timezone.now() - version.last_saved_at).total_seconds()
    return (
        str(version.owned_by_id) == str(user_id)
        and time_difference <= max_time_difference
    )
def update_existing_version(version: IssueDescriptionVersion, issue) -> None:
    """Copy the issue's current description fields onto an existing version
    row and stamp it as freshly saved, persisting only the touched columns."""
    changes = {
        "description_json": issue.description,
        "description_html": issue.description_html,
        "description_binary": issue.description_binary,
        "description_stripped": issue.description_stripped,
        "last_saved_at": timezone.now(),
    }
    for field_name, field_value in changes.items():
        setattr(version, field_name, field_value)
    version.save(update_fields=list(changes))
@shared_task
def issue_description_version_task(
    updated_issue, issue_id, user_id, is_creating=False
) -> Optional[bool]:
    """Record a description version for an issue after create/update.

    ``updated_issue`` is the JSON-serialized pre-update payload (may be
    empty/None on create). When the description is unchanged and this is
    not an initial create, nothing is written. Rapid consecutive edits by
    the same user are folded into the latest version row instead of
    creating a new one.
    """
    try:
        # Parse updated issue data
        current_issue: Dict = json.loads(updated_issue) if updated_issue else {}
        # Get current issue
        issue = Issue.objects.get(id=issue_id)
        # Check if description has changed; on create we always snapshot.
        if (
            current_issue.get("description_html") == issue.description_html
            and not is_creating
        ):
            return
        with transaction.atomic():
            # Get latest version
            latest_version = (
                IssueDescriptionVersion.objects.filter(issue_id=issue_id)
                .order_by("-last_saved_at")
                .first()
            )
            # Determine whether to update existing or create new version
            if should_update_existing_version(version=latest_version, user_id=user_id):
                update_existing_version(latest_version, issue)
            else:
                IssueDescriptionVersion.log_issue_description_version(issue, user_id)
        return
    except Issue.DoesNotExist:
        # Issue no longer exists, skip processing
        return
    except json.JSONDecodeError as e:
        # Pass the exception object (not a formatted string) so log_exception
        # receives a real exception, consistent with the sibling tasks.
        log_exception(e)
        return
    except Exception as e:
        log_exception(e)
        return

View File

@@ -0,0 +1,254 @@
# Python imports
import json
from typing import Optional, List, Dict
from uuid import UUID
from itertools import groupby
import logging
# Django imports
from django.utils import timezone
from django.db import transaction
# Third party imports
from celery import shared_task
# Module imports
from plane.db.models import (
Issue,
IssueVersion,
ProjectMember,
CycleIssue,
ModuleIssue,
IssueActivity,
IssueAssignee,
IssueLabel,
)
from plane.utils.exception_logger import log_exception
@shared_task
def issue_task(updated_issue, issue_id, user_id):
    """Snapshot issue field changes into IssueVersion.

    ``updated_issue`` is the JSON-serialized payload of proposed values;
    only keys whose stored value actually differs are recorded. Edits by
    the same user within 10 minutes are folded into the latest version
    row; otherwise a new version is logged.
    """
    try:
        current_issue = json.loads(updated_issue) if updated_issue else {}
        issue = Issue.objects.get(id=issue_id)
        # Keep only the fields that actually changed on the stored issue.
        updated_current_issue = {}
        for key, value in current_issue.items():
            if getattr(issue, key) != value:
                updated_current_issue[key] = value
        if updated_current_issue:
            issue_version = (
                IssueVersion.objects.filter(issue_id=issue_id)
                .order_by("-last_saved_at")
                .first()
            )
            # Fix: compare owned_by_id (the FK id) with user_id — the
            # original compared str(owned_by), which stringifies the User
            # object, matching the id-based check used by the
            # description-version task.
            if (
                issue_version
                and str(issue_version.owned_by_id) == str(user_id)
                and (timezone.now() - issue_version.last_saved_at).total_seconds()
                <= 600
            ):
                for key, value in updated_current_issue.items():
                    setattr(issue_version, key, value)
                issue_version.last_saved_at = timezone.now()
                issue_version.save(
                    update_fields=list(updated_current_issue.keys()) + ["last_saved_at"]
                )
            else:
                IssueVersion.log_issue_version(issue, user_id)
        return
    except Issue.DoesNotExist:
        return
    except Exception as e:
        log_exception(e)
        return
def get_owner_id(issue: Issue) -> Optional[int]:
    """Resolve the version owner for an issue.

    Prefers the last updater, then the creator; when neither is present,
    falls back to any admin (role 20) of the issue's project. Returns
    ``None`` if no owner can be determined.
    """
    owner_id = issue.updated_by_id or issue.created_by_id
    if owner_id:
        return owner_id
    # Fall back to an arbitrary project admin when the issue carries no
    # creator/updater information.
    fallback_admin = ProjectMember.objects.filter(
        project_id=issue.project_id,
        role=20,  # Admin role
    ).first()
    if fallback_admin is None:
        return None
    return fallback_admin.member_id
def get_related_data(issue_ids: List[UUID]) -> Dict:
    """Bulk-fetch relations for the given issue IDs.

    Returns a dict with per-issue cycle id, assignee/label/module id lists
    (as strings), and the latest activity id. The identical
    group-by-issue-id pattern that was written out three times is factored
    into a local helper.
    """

    def group_by_issue(pairs) -> Dict:
        """Turn ordered (issue_id, related_id) pairs into
        {issue_id: [str(related_id), ...]}."""
        grouped = {}
        for issue_id, group in groupby(list(pairs), key=lambda x: x[0]):
            grouped[issue_id] = [str(item[1]) for item in group]
        return grouped

    cycle_issues = {
        ci.issue_id: ci.cycle_id
        for ci in CycleIssue.objects.filter(issue_id__in=issue_ids)
    }
    # Get assignees / labels / modules with proper grouping
    assignees = group_by_issue(
        IssueAssignee.objects.filter(issue_id__in=issue_ids)
        .values_list("issue_id", "assignee_id")
        .order_by("issue_id")
    )
    labels = group_by_issue(
        IssueLabel.objects.filter(issue_id__in=issue_ids)
        .values_list("issue_id", "label_id")
        .order_by("issue_id")
    )
    modules = group_by_issue(
        ModuleIssue.objects.filter(issue_id__in=issue_ids)
        .values_list("issue_id", "module_id")
        .order_by("issue_id")
    )
    # Get latest activity per issue (first row of each created_at-descending group)
    latest_activities = {}
    activities = IssueActivity.objects.filter(issue_id__in=issue_ids).order_by(
        "issue_id", "-created_at"
    )
    for issue_id, activities_group in groupby(activities, key=lambda x: x.issue_id):
        first_activity = next(activities_group, None)
        if first_activity:
            latest_activities[issue_id] = first_activity.id
    return {
        "cycle_issues": cycle_issues,
        "assignees": assignees,
        "labels": labels,
        "modules": modules,
        "activities": latest_activities,
    }
def create_issue_version(issue: Issue, related_data: Dict) -> Optional[IssueVersion]:
    """Create IssueVersion object from the given issue and related data

    ``related_data`` is the bulk-fetched dict from ``get_related_data``.
    Returns an unsaved IssueVersion instance (caller bulk-creates), or
    ``None`` when required fields/owner are missing or building fails.
    """
    try:
        if not issue.workspace_id or not issue.project_id:
            logging.warning(
                f"Skipping issue {issue.id} - missing workspace_id or project_id"
            )
            return None
        # Owner: updater, creator, or a project admin as fallback.
        owned_by_id = get_owner_id(issue)
        if owned_by_id is None:
            logging.warning(f"Skipping issue {issue.id} - missing owned_by")
            return None
        return IssueVersion(
            workspace_id=issue.workspace_id,
            project_id=issue.project_id,
            created_by_id=issue.created_by_id,
            updated_by_id=issue.updated_by_id,
            owned_by_id=owned_by_id,
            last_saved_at=timezone.now(),
            # Latest activity id for this issue, if any.
            activity_id=related_data["activities"].get(issue.id),
            # getattr with defaults: tolerate issues lacking these attrs.
            properties=getattr(issue, "properties", {}),
            meta=getattr(issue, "meta", {}),
            issue_id=issue.id,
            parent=issue.parent_id,
            state=issue.state_id,
            estimate_point=issue.estimate_point_id,
            name=issue.name,
            priority=issue.priority,
            start_date=issue.start_date,
            target_date=issue.target_date,
            # Related ids come pre-grouped from get_related_data.
            assignees=related_data["assignees"].get(issue.id, []),
            sequence_id=issue.sequence_id,
            labels=related_data["labels"].get(issue.id, []),
            sort_order=issue.sort_order,
            completed_at=issue.completed_at,
            archived_at=issue.archived_at,
            is_draft=issue.is_draft,
            external_source=issue.external_source,
            external_id=issue.external_id,
            type=issue.type_id,
            cycle=related_data["cycle_issues"].get(issue.id),
            modules=related_data["modules"].get(issue.id, []),
        )
    except Exception as e:
        log_exception(e)
        return None
@shared_task
def sync_issue_version(batch_size=5000, offset=0, countdown=300):
    """Task to create IssueVersion records for existing Issues in batches

    Processes Issues ordered by ``created_at`` in windows of ``batch_size``
    starting at ``offset``, bulk-creating one IssueVersion per issue, then
    re-queues itself ``countdown`` seconds later until the table is covered.
    """
    try:
        with transaction.atomic():
            base_query = Issue.objects
            total_issues_count = base_query.count()
            if total_issues_count == 0:
                return
            # Window end, clamped to the table size.
            end_offset = min(offset + batch_size, total_issues_count)
            # Get issues batch with optimized queries
            issues_batch = list(
                base_query.order_by("created_at")
                .select_related("workspace", "project")
                .all()[offset:end_offset]
            )
            if not issues_batch:
                return
            # Get all related data in bulk
            issue_ids = [issue.id for issue in issues_batch]
            related_data = get_related_data(issue_ids)
            issue_versions = []
            for issue in issues_batch:
                # create_issue_version returns None for unbuildable issues.
                version = create_issue_version(issue, related_data)
                if version:
                    issue_versions.append(version)
            # Bulk create versions
            if issue_versions:
                IssueVersion.objects.bulk_create(issue_versions, batch_size=1000)
            # Schedule the next batch if there are more issues to process;
            # countdown is both the task kwarg and the queue delay.
            if end_offset < total_issues_count:
                sync_issue_version.apply_async(
                    kwargs={
                        "batch_size": batch_size,
                        "offset": end_offset,
                        "countdown": countdown,
                    },
                    countdown=countdown,
                )
            logging.info(f"Processed Issues: {end_offset}")
            return
    except Exception as e:
        log_exception(e)
        return
@shared_task
def schedule_issue_version(batch_size=5000, countdown=300):
    """Kick off the batched IssueVersion backfill.

    Both parameters may arrive as strings (the management command feeds
    them from ``input()``), so both are coerced to int — the original
    cast only ``batch_size``, leaving a string ``countdown`` to reach
    Celery's scheduling math.
    """
    sync_issue_version.delay(batch_size=int(batch_size), countdown=int(countdown))

View File

@@ -257,7 +257,9 @@ def notifications(
)
new_mentions = [
str(mention) for mention in new_mentions if mention in set(project_members)
str(mention)
for mention in new_mentions
if mention in set(project_members)
]
removed_mention = get_removed_mentions(
requested_instance=requested_data, current_instance=current_instance

View File

@@ -13,28 +13,14 @@ from plane.db.models import (
class Command(BaseCommand):
help = "Add a member to a project. If present in the workspace"
def add_arguments(self, parser):
# Positional argument
parser.add_argument("--project_id", type=str, nargs="?", help="Project ID")
parser.add_argument("--user_email", type=str, nargs="?", help="User Email")
parser.add_argument(
"--project_id",
type=str,
nargs="?",
help="Project ID",
)
parser.add_argument(
"--user_email",
type=str,
nargs="?",
help="User Email",
)
parser.add_argument(
"--role",
type=int,
nargs="?",
help="Role of the user in the project",
"--role", type=int, nargs="?", help="Role of the user in the project"
)
def handle(self, *args: Any, **options: Any):
@@ -67,9 +53,7 @@ class Command(BaseCommand):
# Get the smallest sort order
smallest_sort_order = (
ProjectMember.objects.filter(
workspace_id=project.workspace_id,
)
ProjectMember.objects.filter(workspace_id=project.workspace_id)
.order_by("sort_order")
.first()
)
@@ -79,22 +63,15 @@ class Command(BaseCommand):
else:
sort_order = 65535
if ProjectMember.objects.filter(
project=project,
member=user,
).exists():
if ProjectMember.objects.filter(project=project, member=user).exists():
# Update the project member
ProjectMember.objects.filter(
project=project,
member=user,
).update(is_active=True, sort_order=sort_order, role=role)
ProjectMember.objects.filter(project=project, member=user).update(
is_active=True, sort_order=sort_order, role=role
)
else:
# Create the project member
ProjectMember.objects.create(
project=project,
member=user,
role=role,
sort_order=sort_order,
project=project, member=user, role=role, sort_order=sort_order
)
# Issue Property
@@ -102,9 +79,7 @@ class Command(BaseCommand):
# Success message
self.stdout.write(
self.style.SUCCESS(
f"User {user_email} added to project {project_id}"
)
self.style.SUCCESS(f"User {user_email} added to project {project_id}")
)
return
except CommandError as e:

View File

@@ -0,0 +1,23 @@
# Django imports
from django.core.management.base import BaseCommand
# Module imports
from plane.bgtasks.issue_description_version_sync import (
schedule_issue_description_version,
)
class Command(BaseCommand):
    help = "Creates IssueDescriptionVersion records for existing Issues in batches"

    def handle(self, *args, **options):
        """Prompt for batch parameters and enqueue the backfill task.

        Fix: non-numeric input previously raised an uncaught ValueError
        from ``int(batch_countdown)``; both values are now validated
        before the task is dispatched.
        """
        batch_size = input("Enter the batch size: ")
        batch_countdown = input("Enter the batch countdown: ")
        try:
            batch_size = int(batch_size)
            batch_countdown = int(batch_countdown)
        except ValueError:
            self.stderr.write(
                self.style.ERROR("Batch size and batch countdown must be integers")
            )
            return
        schedule_issue_description_version.delay(
            batch_size=batch_size, countdown=batch_countdown
        )
        self.stdout.write(
            self.style.SUCCESS("Successfully created issue description version task")
        )

View File

@@ -0,0 +1,19 @@
# Django imports
from django.core.management.base import BaseCommand
# Module imports
from plane.bgtasks.issue_version_sync import schedule_issue_version
class Command(BaseCommand):
    help = "Creates IssueVersion records for existing Issues in batches"

    def handle(self, *args, **options):
        """Prompt for batch parameters and enqueue the backfill task.

        Fix: non-numeric input previously raised an uncaught ValueError
        from ``int(batch_countdown)``; both values are now validated
        before the task is dispatched.
        """
        batch_size = input("Enter the batch size: ")
        batch_countdown = input("Enter the batch countdown: ")
        try:
            batch_size = int(batch_size)
            batch_countdown = int(batch_countdown)
        except ValueError:
            self.stderr.write(
                self.style.ERROR("Batch size and batch countdown must be integers")
            )
            return
        schedule_issue_version.delay(batch_size=batch_size, countdown=batch_countdown)
        self.stdout.write(self.style.SUCCESS("Successfully created issue version task"))

View File

@@ -0,0 +1,117 @@
# Generated by Django 4.2.17 on 2024-12-13 10:09
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import plane.db.models.user
import uuid
class Migration(migrations.Migration):
    # Auto-generated migration (Django 4.2.17). Summary:
    #  - moves description fields off IssueVersion (history now lives in the
    #    new IssueDescriptionVersion model created below),
    #  - links IssueVersion to its triggering IssueActivity,
    #  - adds mobile-onboarding and language fields to Profile,
    #  - creates the Sticky model.

    dependencies = [
        ('db', '0086_issueversion_alter_teampage_unique_together_and_more'),
    ]

    operations = [
        # Description columns leave IssueVersion.
        migrations.RemoveField(
            model_name='issueversion',
            name='description',
        ),
        migrations.RemoveField(
            model_name='issueversion',
            name='description_binary',
        ),
        migrations.RemoveField(
            model_name='issueversion',
            name='description_html',
        ),
        migrations.RemoveField(
            model_name='issueversion',
            name='description_stripped',
        ),
        migrations.AddField(
            model_name='issueversion',
            name='activity',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='versions', to='db.issueactivity'),
        ),
        # Mobile onboarding / language preferences on the user profile.
        migrations.AddField(
            model_name='profile',
            name='is_mobile_onboarded',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='profile',
            name='mobile_onboarding_step',
            field=models.JSONField(default=plane.db.models.user.get_mobile_default_onboarding),
        ),
        migrations.AddField(
            model_name='profile',
            name='mobile_timezone_auto_set',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='profile',
            name='language',
            field=models.CharField(default='en', max_length=255),
        ),
        migrations.AlterField(
            model_name='issueversion',
            name='owned_by',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_versions', to=settings.AUTH_USER_MODEL),
        ),
        migrations.CreateModel(
            name='Sticky',
            fields=[
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
                ('deleted_at', models.DateTimeField(blank=True, null=True, verbose_name='Deleted At')),
                ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
                ('name', models.TextField()),
                ('description', models.JSONField(blank=True, default=dict)),
                ('description_html', models.TextField(blank=True, default='<p></p>')),
                ('description_stripped', models.TextField(blank=True, null=True)),
                ('description_binary', models.BinaryField(null=True)),
                ('logo_props', models.JSONField(default=dict)),
                ('color', models.CharField(blank=True, max_length=255, null=True)),
                ('background_color', models.CharField(blank=True, max_length=255, null=True)),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
                ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='stickies', to=settings.AUTH_USER_MODEL)),
                ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
                ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='stickies', to='db.workspace')),
            ],
            options={
                'verbose_name': 'Sticky',
                'verbose_name_plural': 'Stickies',
                'db_table': 'stickies',
                'ordering': ('-created_at',),
            },
        ),
        migrations.CreateModel(
            name='IssueDescriptionVersion',
            fields=[
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
                ('deleted_at', models.DateTimeField(blank=True, null=True, verbose_name='Deleted At')),
                ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
                ('description_binary', models.BinaryField(null=True)),
                ('description_html', models.TextField(blank=True, default='<p></p>')),
                ('description_stripped', models.TextField(blank=True, null=True)),
                ('description_json', models.JSONField(blank=True, default=dict)),
                ('last_saved_at', models.DateTimeField(default=django.utils.timezone.now)),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
                ('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='description_versions', to='db.issue')),
                ('owned_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_description_versions', to=settings.AUTH_USER_MODEL)),
                ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project')),
                ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
                ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace')),
            ],
            options={
                'verbose_name': 'Issue Description Version',
                'verbose_name_plural': 'Issue Description Versions',
                'db_table': 'issue_description_versions',
            },
        ),
    ]

View File

@@ -41,6 +41,8 @@ from .issue import (
IssueSequence,
IssueSubscriber,
IssueVote,
IssueVersion,
IssueDescriptionVersion,
)
from .module import Module, ModuleIssue, ModuleLink, ModuleMember, ModuleUserProperties
from .notification import EmailNotificationLog, Notification, UserNotificationPreference
@@ -53,7 +55,6 @@ from .project import (
ProjectMemberInvite,
ProjectPublicMember,
)
from .deploy_board import DeployBoard
from .session import Session
from .social_connection import SocialLoginConnection
from .state import State
@@ -69,24 +70,6 @@ from .workspace import (
WorkspaceUserProperties,
)
from .importer import Importer
from .page import Page, PageLog, PageLabel
from .estimate import Estimate, EstimatePoint
from .intake import Intake, IntakeIssue
from .analytic import AnalyticView
from .notification import Notification, UserNotificationPreference, EmailNotificationLog
from .exporter import ExporterHistory
from .webhook import Webhook, WebhookLog
from .dashboard import Dashboard, DashboardWidget, Widget
from .favorite import UserFavorite
from .issue_type import IssueType
@@ -96,3 +79,5 @@ from .recent_visit import UserRecentVisit
from .label import Label
from .device import Device, DeviceSession
from .sticky import Sticky

View File

@@ -44,45 +44,25 @@ class FileAsset(BaseModel):
"db.User", on_delete=models.CASCADE, null=True, related_name="assets"
)
workspace = models.ForeignKey(
"db.Workspace",
on_delete=models.CASCADE,
null=True,
related_name="assets",
"db.Workspace", on_delete=models.CASCADE, null=True, related_name="assets"
)
draft_issue = models.ForeignKey(
"db.DraftIssue",
on_delete=models.CASCADE,
null=True,
related_name="assets",
"db.DraftIssue", on_delete=models.CASCADE, null=True, related_name="assets"
)
project = models.ForeignKey(
"db.Project",
on_delete=models.CASCADE,
null=True,
related_name="assets",
"db.Project", on_delete=models.CASCADE, null=True, related_name="assets"
)
issue = models.ForeignKey(
"db.Issue", on_delete=models.CASCADE, null=True, related_name="assets"
)
comment = models.ForeignKey(
"db.IssueComment",
on_delete=models.CASCADE,
null=True,
related_name="assets",
"db.IssueComment", on_delete=models.CASCADE, null=True, related_name="assets"
)
page = models.ForeignKey(
"db.Page", on_delete=models.CASCADE, null=True, related_name="assets"
)
entity_type = models.CharField(
max_length=255,
null=True,
blank=True,
)
entity_identifier = models.CharField(
max_length=255,
null=True,
blank=True,
)
entity_type = models.CharField(max_length=255, null=True, blank=True)
entity_identifier = models.CharField(max_length=255, null=True, blank=True)
is_deleted = models.BooleanField(default=False)
is_archived = models.BooleanField(default=False)
external_id = models.CharField(max_length=255, null=True, blank=True)

View File

@@ -15,6 +15,7 @@ from django import apps
from plane.utils.html_processor import strip_tags
from plane.db.mixins import SoftDeletionManager
from plane.utils.exception_logger import log_exception
from .base import BaseModel
from .project import ProjectBaseModel
@@ -660,11 +661,6 @@ class IssueVote(ProjectBaseModel):
class IssueVersion(ProjectBaseModel):
issue = models.ForeignKey(
"db.Issue",
on_delete=models.CASCADE,
related_name="versions",
)
PRIORITY_CHOICES = (
("urgent", "Urgent"),
("high", "High"),
@@ -672,14 +668,11 @@ class IssueVersion(ProjectBaseModel):
("low", "Low"),
("none", "None"),
)
parent = models.UUIDField(blank=True, null=True)
state = models.UUIDField(blank=True, null=True)
estimate_point = models.UUIDField(blank=True, null=True)
name = models.CharField(max_length=255, verbose_name="Issue Name")
description = models.JSONField(blank=True, default=dict)
description_html = models.TextField(blank=True, default="<p></p>")
description_stripped = models.TextField(blank=True, null=True)
description_binary = models.BinaryField(null=True)
priority = models.CharField(
max_length=30,
choices=PRIORITY_CHOICES,
@@ -688,9 +681,9 @@ class IssueVersion(ProjectBaseModel):
)
start_date = models.DateField(null=True, blank=True)
target_date = models.DateField(null=True, blank=True)
sequence_id = models.IntegerField(
default=1, verbose_name="Issue Sequence ID"
)
assignees = ArrayField(models.UUIDField(), blank=True, default=list)
sequence_id = models.IntegerField(default=1, verbose_name="Issue Sequence ID")
labels = ArrayField(models.UUIDField(), blank=True, default=list)
sort_order = models.FloatField(default=65535)
completed_at = models.DateTimeField(null=True)
archived_at = models.DateField(null=True)
@@ -698,29 +691,26 @@ class IssueVersion(ProjectBaseModel):
external_source = models.CharField(max_length=255, null=True, blank=True)
external_id = models.CharField(max_length=255, blank=True, null=True)
type = models.UUIDField(blank=True, null=True)
cycle = models.UUIDField(null=True, blank=True)
modules = ArrayField(models.UUIDField(), blank=True, default=list)
properties = models.JSONField(default=dict) # issue properties
meta = models.JSONField(default=dict) # issue meta
last_saved_at = models.DateTimeField(default=timezone.now)
owned_by = models.UUIDField()
assignees = ArrayField(
models.UUIDField(),
blank=True,
default=list,
issue = models.ForeignKey(
"db.Issue", on_delete=models.CASCADE, related_name="versions"
)
labels = ArrayField(
models.UUIDField(),
blank=True,
default=list,
)
cycle = models.UUIDField(
activity = models.ForeignKey(
"db.IssueActivity",
on_delete=models.SET_NULL,
null=True,
blank=True,
related_name="versions",
)
modules = ArrayField(
models.UUIDField(),
blank=True,
default=list,
owned_by = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
related_name="issue_versions",
)
properties = models.JSONField(default=dict)
meta = models.JSONField(default=dict)
class Meta:
verbose_name = "Issue Version"
@@ -740,43 +730,93 @@ class IssueVersion(ProjectBaseModel):
Module = apps.get_model("db.Module")
CycleIssue = apps.get_model("db.CycleIssue")
IssueAssignee = apps.get_model("db.IssueAssignee")
IssueLabel = apps.get_model("db.IssueLabel")
cycle_issue = CycleIssue.objects.filter(
issue=issue,
).first()
cycle_issue = CycleIssue.objects.filter(issue=issue).first()
cls.objects.create(
issue=issue,
parent=issue.parent,
state=issue.state,
point=issue.point,
estimate_point=issue.estimate_point,
parent=issue.parent_id,
state=issue.state_id,
estimate_point=issue.estimate_point_id,
name=issue.name,
description=issue.description,
description_html=issue.description_html,
description_stripped=issue.description_stripped,
description_binary=issue.description_binary,
priority=issue.priority,
start_date=issue.start_date,
target_date=issue.target_date,
assignees=list(
IssueAssignee.objects.filter(issue=issue).values_list(
"assignee_id", flat=True
)
),
sequence_id=issue.sequence_id,
labels=list(
IssueLabel.objects.filter(issue=issue).values_list(
"label_id", flat=True
)
),
sort_order=issue.sort_order,
completed_at=issue.completed_at,
archived_at=issue.archived_at,
is_draft=issue.is_draft,
external_source=issue.external_source,
external_id=issue.external_id,
type=issue.type,
last_saved_at=issue.last_saved_at,
assignees=issue.assignees,
labels=issue.labels,
cycle=cycle_issue.cycle if cycle_issue else None,
modules=Module.objects.filter(issue=issue).values_list(
"id", flat=True
type=issue.type_id,
cycle=cycle_issue.cycle_id if cycle_issue else None,
modules=list(
Module.objects.filter(issue=issue).values_list("id", flat=True)
),
properties={},
meta={},
last_saved_at=timezone.now(),
owned_by=user,
)
return True
except Exception as e:
log_exception(e)
return False
class IssueDescriptionVersion(ProjectBaseModel):
    """Point-in-time snapshot of an Issue's description fields.

    A new row is appended via ``log_issue_description_version`` each time the
    description is saved, preserving the binary, HTML, stripped-text and JSON
    representations so the description history can be inspected or restored.
    """

    # Source issue; deleting the issue cascades to its description history.
    issue = models.ForeignKey(
        "db.Issue", on_delete=models.CASCADE, related_name="description_versions"
    )
    # Binary payload of the description (presumably collaborative-editor
    # state — confirm against the editor integration); nullable.
    description_binary = models.BinaryField(null=True)
    description_html = models.TextField(blank=True, default="<p></p>")
    description_stripped = models.TextField(blank=True, null=True)
    description_json = models.JSONField(default=dict, blank=True)
    # Timestamp of the save this snapshot corresponds to.
    last_saved_at = models.DateTimeField(default=timezone.now)
    # User who triggered this snapshot (not necessarily the issue creator).
    owned_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name="issue_description_versions",
    )

    class Meta:
        verbose_name = "Issue Description Version"
        verbose_name_plural = "Issue Description Versions"
        db_table = "issue_description_versions"

    @classmethod
    def log_issue_description_version(cls, issue, user):
        # Snapshot the issue's current description into a new row.
        # Returns True on success, False on any failure (the exception is
        # logged and swallowed so versioning never breaks the save path).
        try:
            """
            Log the issue description version
            """
            cls.objects.create(
                workspace_id=issue.workspace_id,
                project_id=issue.project_id,
                created_by_id=issue.created_by_id,
                updated_by_id=issue.updated_by_id,
                # NOTE(review): assigned to owned_by_id — assumes ``user`` is
                # a primary key, not a User instance; confirm at call sites.
                owned_by_id=user,
                last_saved_at=timezone.now(),
                issue_id=issue.id,
                description_binary=issue.description_binary,
                description_html=issue.description_html,
                description_stripped=issue.description_stripped,
                # JSON snapshot is taken from ``issue.description``.
                description_json=issue.description,
            )
            return True
        except Exception as e:
            log_exception(e)
            return False

View File

@@ -0,0 +1,32 @@
# Django imports
from django.conf import settings
from django.db import models
# Module imports
from .base import BaseModel
class Sticky(BaseModel):
    """A user-owned sticky note scoped to a workspace.

    Stores the note body in several parallel representations (JSON, HTML,
    stripped text, binary) plus presentation settings (colors, logo).
    """

    name = models.TextField()
    # Parallel representations of the note body.
    description = models.JSONField(blank=True, default=dict)
    description_html = models.TextField(blank=True, default="<p></p>")
    description_stripped = models.TextField(blank=True, null=True)
    # Binary payload (presumably collaborative-editor state — confirm);
    # nullable.
    description_binary = models.BinaryField(null=True)
    # Icon/emoji configuration for the sticky.
    logo_props = models.JSONField(default=dict)
    color = models.CharField(max_length=255, blank=True, null=True)
    background_color = models.CharField(max_length=255, blank=True, null=True)
    workspace = models.ForeignKey(
        "db.Workspace", on_delete=models.CASCADE, related_name="stickies"
    )
    owner = models.ForeignKey(
        settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name="stickies"
    )

    class Meta:
        verbose_name = "Sticky"
        verbose_name_plural = "Stickies"
        db_table = "stickies"
        # Newest first by default.
        ordering = ("-created_at",)

View File

@@ -26,6 +26,14 @@ def get_default_onboarding():
}
def get_mobile_default_onboarding():
    """Return a fresh default mobile-onboarding progress map.

    Every onboarding step starts out incomplete (False). A new dict is
    built on each call so callers can mutate it safely (Django JSONField
    default callable).
    """
    steps = ("profile_complete", "workspace_create", "workspace_join")
    return {step: False for step in steps}
class User(AbstractBaseUser, PermissionsMixin):
id = models.UUIDField(
default=uuid.uuid4, unique=True, editable=False, db_index=True, primary_key=True
@@ -178,6 +186,12 @@ class Profile(TimeAuditModel):
billing_address = models.JSONField(null=True)
has_billing_address = models.BooleanField(default=False)
company_name = models.CharField(max_length=255, blank=True)
# mobile
is_mobile_onboarded = models.BooleanField(default=False)
mobile_onboarding_step = models.JSONField(default=get_mobile_default_onboarding)
mobile_timezone_auto_set = models.BooleanField(default=False)
# language
language = models.CharField(max_length=255, default="en")
class Meta:
verbose_name = "Profile"

View File

@@ -29,9 +29,7 @@ def validate_domain(value):
class Webhook(BaseModel):
workspace = models.ForeignKey(
"db.Workspace",
on_delete=models.CASCADE,
related_name="workspace_webhooks",
"db.Workspace", on_delete=models.CASCADE, related_name="workspace_webhooks"
)
url = models.URLField(
validators=[validate_schema, validate_domain], max_length=1024

View File

@@ -102,12 +102,7 @@ def get_default_display_properties():
def get_issue_props():
return {
"subscribed": True,
"assigned": True,
"created": True,
"all_issues": True,
}
return {"subscribed": True, "assigned": True, "created": True, "all_issues": True}
def slug_validator(value):
@@ -136,9 +131,7 @@ class Workspace(BaseModel):
max_length=48, db_index=True, unique=True, validators=[slug_validator]
)
organization_size = models.CharField(max_length=20, blank=True, null=True)
timezone = models.CharField(
max_length=255, default="UTC", choices=TIMEZONE_CHOICES
)
timezone = models.CharField(max_length=255, default="UTC", choices=TIMEZONE_CHOICES)
def __str__(self):
"""Return name of the Workspace"""
@@ -167,10 +160,7 @@ class WorkspaceBaseModel(BaseModel):
"db.Workspace", models.CASCADE, related_name="workspace_%(class)s"
)
project = models.ForeignKey(
"db.Project",
models.CASCADE,
related_name="project_%(class)s",
null=True,
"db.Project", models.CASCADE, related_name="project_%(class)s", null=True
)
class Meta:
@@ -184,9 +174,7 @@ class WorkspaceBaseModel(BaseModel):
class WorkspaceMember(BaseModel):
workspace = models.ForeignKey(
"db.Workspace",
on_delete=models.CASCADE,
related_name="workspace_member",
"db.Workspace", on_delete=models.CASCADE, related_name="workspace_member"
)
member = models.ForeignKey(
settings.AUTH_USER_MODEL,
@@ -221,9 +209,7 @@ class WorkspaceMember(BaseModel):
class WorkspaceMemberInvite(BaseModel):
workspace = models.ForeignKey(
"db.Workspace",
on_delete=models.CASCADE,
related_name="workspace_member_invite",
"db.Workspace", on_delete=models.CASCADE, related_name="workspace_member_invite"
)
email = models.CharField(max_length=255)
accepted = models.BooleanField(default=False)
@@ -283,9 +269,7 @@ class WorkspaceTheme(BaseModel):
)
name = models.CharField(max_length=300)
actor = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
related_name="themes",
settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name="themes"
)
colors = models.JSONField(default=dict)
@@ -320,9 +304,7 @@ class WorkspaceUserProperties(BaseModel):
)
filters = models.JSONField(default=get_default_filters)
display_filters = models.JSONField(default=get_default_display_filters)
display_properties = models.JSONField(
default=get_default_display_properties
)
display_properties = models.JSONField(default=get_default_display_properties)
class Meta:
unique_together = ["workspace", "user", "deleted_at"]

View File

@@ -2,4 +2,4 @@ from .instance import InstanceSerializer
from .configuration import InstanceConfigurationSerializer
from .admin import InstanceAdminSerializer, InstanceAdminMeSerializer
from .workspace import WorkspaceSerializer
from .workspace import WorkspaceSerializer

View File

@@ -1,6 +1,8 @@
from .base import BaseSerializer
from plane.db.models import User
class UserLiteSerializer(BaseSerializer):
class Meta:
model = User
fields = ["id", "email", "first_name", "last_name",]
fields = ["id", "email", "first_name", "last_name"]

View File

@@ -13,6 +13,8 @@ from .admin import (
InstanceAdminUserSessionEndpoint,
)
from .changelog import ChangeLogEndpoint
from .workspace import InstanceWorkSpaceAvailabilityCheckEndpoint, InstanceWorkSpaceEndpoint
from .workspace import (
InstanceWorkSpaceAvailabilityCheckEndpoint,
InstanceWorkSpaceEndpoint,
)

View File

@@ -1,33 +0,0 @@
# Python imports
import requests
# Django imports
from django.conf import settings
# Third party imports
from rest_framework.response import Response
from rest_framework import status
from rest_framework.permissions import AllowAny
# plane imports
from .base import BaseAPIView
class ChangeLogEndpoint(BaseAPIView):
    """Public endpoint that proxies the hosted changelog feed.

    Fetches JSON from ``settings.INSTANCE_CHANGELOG_URL`` and relays it to
    the client; no authentication required.
    """

    permission_classes = [AllowAny]

    def fetch_change_logs(self):
        # Fetch and decode the remote changelog. ``raise_for_status`` raises
        # on non-2xx responses; the error propagates to DRF's handler.
        response = requests.get(settings.INSTANCE_CHANGELOG_URL)
        response.raise_for_status()
        return response.json()

    def get(self, request):
        # Fetch the changelog
        if settings.INSTANCE_CHANGELOG_URL:
            data = self.fetch_change_logs()
            return Response(data, status=status.HTTP_200_OK)
        else:
            # Changelog URL not configured for this instance.
            return Response(
                {"error": "could not fetch changelog please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

View File

@@ -43,19 +43,19 @@ class InstanceWorkSpaceEndpoint(BaseAPIView):
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
member_count = (
WorkspaceMember.objects.filter(
workspace=OuterRef("id"), member__is_bot=False, is_active=True
).select_related("owner")
)
.select_related("owner")
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
workspaces = Workspace.objects.annotate(
total_projects=project_count,
total_members=member_count,
total_projects=project_count, total_members=member_count
)
# Add search functionality
@@ -66,16 +66,14 @@ class InstanceWorkSpaceEndpoint(BaseAPIView):
return self.paginate(
request=request,
queryset=workspaces,
on_results=lambda results: WorkspaceSerializer(
results, many=True,
).data,
on_results=lambda results: WorkspaceSerializer(results, many=True).data,
max_per_page=10,
default_per_page=10,
)
def post(self, request):
try:
serializer = WorkspaceSerializer (data=request.data)
serializer = WorkspaceSerializer(data=request.data)
slug = request.data.get("slug", False)
name = request.data.get("name", False)

View File

@@ -11,14 +11,12 @@ from plane.license.api.views import (
InstanceAdminUserMeEndpoint,
InstanceAdminSignOutEndpoint,
InstanceAdminUserSessionEndpoint,
ChangeLogEndpoint,
InstanceWorkSpaceAvailabilityCheckEndpoint,
InstanceWorkSpaceEndpoint,
)
urlpatterns = [
path("", InstanceEndpoint.as_view(), name="instance"),
path("changelog/", ChangeLogEndpoint.as_view(), name="instance-changelog"),
path("admins/", InstanceAdminEndpoint.as_view(), name="instance-admins"),
path("admins/me/", InstanceAdminUserMeEndpoint.as_view(), name="instance-admins"),
path(
@@ -62,9 +60,5 @@ urlpatterns = [
InstanceWorkSpaceAvailabilityCheckEndpoint.as_view(),
name="instance-workspace-availability",
),
path(
"workspaces/",
InstanceWorkSpaceEndpoint.as_view(),
name="instance-workspace",
),
path("workspaces/", InstanceWorkSpaceEndpoint.as_view(), name="instance-workspace"),
]

View File

@@ -262,6 +262,9 @@ CELERY_IMPORTS = (
"plane.license.bgtasks.tracer",
# management tasks
"plane.bgtasks.dummy_data_task",
# issue version tasks
"plane.bgtasks.issue_version_sync",
"plane.bgtasks.issue_description_version_sync",
)
# Sentry Settings

View File

@@ -10,9 +10,15 @@ from plane.space.views import (
ProjectStatesEndpoint,
ProjectLabelsEndpoint,
ProjectMembersEndpoint,
ProjectMetaDataEndpoint,
)
urlpatterns = [
path(
"anchor/<str:anchor>/meta/",
ProjectMetaDataEndpoint.as_view(),
name="project-meta",
),
path(
"anchor/<str:anchor>/settings/",
ProjectDeployBoardPublicSettingsEndpoint.as_view(),

View File

@@ -25,3 +25,5 @@ from .state import ProjectStatesEndpoint
from .label import ProjectLabelsEndpoint
from .asset import EntityAssetEndpoint, AssetRestoreEndpoint, EntityBulkAssetEndpoint
from .meta import ProjectMetaDataEndpoint

View File

@@ -0,0 +1,34 @@
# third party
from rest_framework.permissions import AllowAny
from rest_framework import status
from rest_framework.response import Response
from plane.db.models import DeployBoard, Project
from .base import BaseAPIView
from plane.space.serializer.project import ProjectLiteSerializer
class ProjectMetaDataEndpoint(BaseAPIView):
    """Public (unauthenticated) endpoint returning lightweight metadata for
    a project published under a DeployBoard anchor.

    Returns 404 with an ``error`` payload when the anchor is unknown or the
    referenced project no longer exists.
    """

    permission_classes = [AllowAny]

    def get(self, request, anchor):
        # BUG FIX: QuerySet.first() returns None and never raises
        # DoesNotExist, so the original `except DeployBoard.DoesNotExist`
        # was dead code and a missing board crashed with AttributeError on
        # None. Check for None explicitly instead.
        deploy_board = DeployBoard.objects.filter(
            anchor=anchor, entity_name="project"
        ).first()
        if deploy_board is None:
            return Response(
                {"error": "Project is not published"}, status=status.HTTP_404_NOT_FOUND
            )

        try:
            # entity_identifier holds the published project's primary key.
            project = Project.objects.get(id=deploy_board.entity_identifier)
        except Project.DoesNotExist:
            return Response(
                {"error": "Project is not published"}, status=status.HTTP_404_NOT_FOUND
            )

        serializer = ProjectLiteSerializer(project)
        return Response(serializer.data, status=status.HTTP_200_OK)

View File

@@ -0,0 +1,100 @@
import pytz
from plane.db.models import Project
from datetime import datetime, time
from datetime import timedelta
def user_timezone_converter(queryset, datetime_fields, user_timezone):
    """Convert the given datetime fields to the user's local timezone.

    Accepts either a single dict or an iterable of dicts; items are
    mutated in place. Returns a single dict when a dict was passed,
    otherwise a list.
    """
    # Timezone object for the target (user's) timezone.
    user_tz = pytz.timezone(user_timezone)

    # Normalize to a list of dicts, remembering the original shape.
    is_single = isinstance(queryset, dict)
    rows = [queryset] if is_single else list(queryset)

    for row in rows:
        for field in datetime_fields:
            # Only convert fields that are present and truthy (skips
            # missing keys and None values alike).
            if row.get(field):
                row[field] = row[field].astimezone(user_tz)

    # Restore the caller's original shape.
    return rows[0] if is_single else rows
def convert_to_utc(date, project_id, is_start_date=False):
    """
    Interpret ``date`` ("YYYY-MM-DD") as midnight in the project's local
    timezone and return the equivalent UTC datetime for storage.

    Args:
        date (str): The date string in "YYYY-MM-DD" format.
        project_id: The project's ID, used to look up its timezone.
        is_start_date (bool): When True, one minute is added to the local
            midnight before converting (keeps start dates strictly after
            the day boundary).

    Returns:
        datetime: The UTC datetime.

    Raises:
        ValueError: If the date or the project's timezone is missing.
    """
    # Look up the owning project's configured timezone.
    project_timezone = Project.objects.get(id=project_id).timezone

    if not date or not project_timezone:
        raise ValueError("Both date and timezone must be provided.")

    # Parse the string into a date, then anchor it at local midnight.
    parsed_date = datetime.strptime(date, "%Y-%m-%d").date()
    local_tz = pytz.timezone(project_timezone)
    local_midnight = local_tz.localize(datetime.combine(parsed_date, time.min))

    if is_start_date:
        local_midnight += timedelta(minutes=1)

    # Convert the localized datetime to UTC for storage.
    return local_midnight.astimezone(pytz.utc)
def convert_utc_to_project_timezone(utc_datetime, project_id):
    """
    Convert a stored UTC datetime into the owning project's local timezone.

    Args:
        utc_datetime (datetime): The UTC datetime to convert. Naive values
            are assumed to be UTC and localized accordingly.
        project_id: The project's ID, used to look up its timezone.

    Returns:
        datetime: The datetime expressed in the project's local timezone.

    Raises:
        ValueError: If the project has no timezone configured.
    """
    # Look up the owning project's configured timezone.
    project_timezone = Project.objects.get(id=project_id).timezone
    if not project_timezone:
        raise ValueError("Project timezone must be provided.")

    local_tz = pytz.timezone(project_timezone)

    # Treat naive datetimes as UTC before converting.
    if utc_datetime.tzinfo is None:
        utc_datetime = pytz.utc.localize(utc_datetime)

    return utc_datetime.astimezone(local_tz)

View File

@@ -1,26 +0,0 @@
import pytz
def user_timezone_converter(queryset, datetime_fields, user_timezone):
    """Convert the given datetime fields of a dict (single item) or an
    iterable of dicts to the user's timezone, mutating items in place.

    Returns a dict if a dict was passed, otherwise a list.
    """
    # Create a timezone object for the user's timezone
    user_tz = pytz.timezone(user_timezone)
    # Check if queryset is a dictionary (single item) or a list of dictionaries
    if isinstance(queryset, dict):
        queryset_values = [queryset]
    else:
        queryset_values = list(queryset)
    # Iterate over the dictionaries in the list
    for item in queryset_values:
        # Iterate over the datetime fields
        for field in datetime_fields:
            # Convert the datetime field to the user's timezone; skips
            # missing keys and falsy (e.g. None) values.
            if field in item and item[field]:
                item[field] = item[field].astimezone(user_tz)
    # If queryset was a single item, return a single item
    if isinstance(queryset, dict):
        return queryset_values[0]
    else:
        return queryset_values

View File

@@ -1,7 +1,7 @@
# base requirements
# django
Django==4.2.16
Django==4.2.17
# rest framework
djangorestframework==3.15.2
# postgres

View File

@@ -70,7 +70,7 @@
"value": ""
},
"GITHUB_CLIENT_SECRET": {
"description": "Github Client Secret",
"description": "GitHub Client Secret",
"value": ""
},
"NEXT_PUBLIC_API_BASE_URL": {

View File

@@ -62,7 +62,7 @@ mkdir plane-selfhost
cd plane-selfhost
curl -fsSL -o setup.sh https://raw.githubusercontent.com/makeplane/plane/master/deploy/selfhost/install.sh
curl -fsSL -o setup.sh https://github.com/makeplane/plane/releases/latest/download/setup.sh
chmod +x setup.sh
```

View File

@@ -1,54 +1,63 @@
x-app-env: &app-env
environment:
- NGINX_PORT=${NGINX_PORT:-80}
- WEB_URL=${WEB_URL:-http://localhost}
- DEBUG=${DEBUG:-0}
- SENTRY_DSN=${SENTRY_DSN:-""}
- SENTRY_ENVIRONMENT=${SENTRY_ENVIRONMENT:-"production"}
- CORS_ALLOWED_ORIGINS=${CORS_ALLOWED_ORIGINS:-}
# Gunicorn Workers
- GUNICORN_WORKERS=${GUNICORN_WORKERS:-1}
#DB SETTINGS
- PGHOST=${PGHOST:-plane-db}
- PGDATABASE=${PGDATABASE:-plane}
- POSTGRES_USER=${POSTGRES_USER:-plane}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-plane}
- POSTGRES_DB=${POSTGRES_DB:-plane}
- POSTGRES_PORT=${POSTGRES_PORT:-5432}
- PGDATA=${PGDATA:-/var/lib/postgresql/data}
- DATABASE_URL=${DATABASE_URL:-postgresql://plane:plane@plane-db/plane}
# REDIS SETTINGS
- REDIS_HOST=${REDIS_HOST:-plane-redis}
- REDIS_PORT=${REDIS_PORT:-6379}
- REDIS_URL=${REDIS_URL:-redis://plane-redis:6379/}
x-db-env: &db-env
PGHOST: ${PGHOST:-plane-db}
PGDATABASE: ${PGDATABASE:-plane}
POSTGRES_USER: ${POSTGRES_USER:-plane}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-plane}
POSTGRES_DB: ${POSTGRES_DB:-plane}
POSTGRES_PORT: ${POSTGRES_PORT:-5432}
PGDATA: ${PGDATA:-/var/lib/postgresql/data}
x-redis-env: &redis-env
REDIS_HOST: ${REDIS_HOST:-plane-redis}
REDIS_PORT: ${REDIS_PORT:-6379}
REDIS_URL: ${REDIS_URL:-redis://plane-redis:6379/}
x-minio-env: &minio-env
MINIO_ROOT_USER: ${AWS_ACCESS_KEY_ID:-access-key}
MINIO_ROOT_PASSWORD: ${AWS_SECRET_ACCESS_KEY:-secret-key}
x-aws-s3-env: &aws-s3-env
AWS_REGION: ${AWS_REGION:-}
AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID:-access-key}
AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY:-secret-key}
AWS_S3_ENDPOINT_URL: ${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000}
AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME:-uploads}
x-proxy-env: &proxy-env
NGINX_PORT: ${NGINX_PORT:-80}
BUCKET_NAME: ${AWS_S3_BUCKET_NAME:-uploads}
FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT:-5242880}
x-mq-env: &mq-env
# RabbitMQ Settings
RABBITMQ_HOST: ${RABBITMQ_HOST:-plane-mq}
RABBITMQ_PORT: ${RABBITMQ_PORT:-5672}
RABBITMQ_DEFAULT_USER: ${RABBITMQ_USER:-plane}
RABBITMQ_DEFAULT_PASS: ${RABBITMQ_PASSWORD:-plane}
RABBITMQ_DEFAULT_VHOST: ${RABBITMQ_VHOST:-plane}
RABBITMQ_VHOST: ${RABBITMQ_VHOST:-plane}
x-live-env: &live-env
API_BASE_URL: ${API_BASE_URL:-http://api:8000}
x-app-env: &app-env
WEB_URL: ${WEB_URL:-http://localhost}
DEBUG: ${DEBUG:-0}
SENTRY_DSN: ${SENTRY_DSN}
SENTRY_ENVIRONMENT: ${SENTRY_ENVIRONMENT:-production}
CORS_ALLOWED_ORIGINS: ${CORS_ALLOWED_ORIGINS}
GUNICORN_WORKERS: 1
USE_MINIO: ${USE_MINIO:-1}
DATABASE_URL: ${DATABASE_URL:-postgresql://plane:plane@plane-db/plane}
SECRET_KEY: ${SECRET_KEY:-60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5}
ADMIN_BASE_URL: ${ADMIN_BASE_URL}
SPACE_BASE_URL: ${SPACE_BASE_URL}
APP_BASE_URL: ${APP_BASE_URL}
AMQP_URL: ${AMQP_URL:-amqp://plane:plane@plane-mq:5672/plane}
# RabbitMQ Settings
- RABBITMQ_HOST=${RABBITMQ_HOST:-plane-mq}
- RABBITMQ_PORT=${RABBITMQ_PORT:-5672}
- RABBITMQ_DEFAULT_USER=${RABBITMQ_USER:-plane}
- RABBITMQ_DEFAULT_PASS=${RABBITMQ_PASSWORD:-plane}
- RABBITMQ_DEFAULT_VHOST=${RABBITMQ_VHOST:-plane}
- RABBITMQ_VHOST=${RABBITMQ_VHOST:-plane}
- AMQP_URL=${AMQP_URL:-amqp://plane:plane@plane-mq:5672/plane}
# Application secret
- SECRET_KEY=${SECRET_KEY:-60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5}
# DATA STORE SETTINGS
- USE_MINIO=${USE_MINIO:-1}
- AWS_REGION=${AWS_REGION:-}
- AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-"access-key"}
- AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-"secret-key"}
- AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000}
- AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads}
- MINIO_ROOT_USER=${MINIO_ROOT_USER:-"access-key"}
- MINIO_ROOT_PASSWORD=${MINIO_ROOT_PASSWORD:-"secret-key"}
- BUCKET_NAME=${BUCKET_NAME:-uploads}
- FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880}
# Live server env
- API_BASE_URL=${API_BASE_URL:-http://api:8000}
services:
web:
<<: *app-env
image: ${DOCKERHUB_USER:-makeplane}/plane-frontend:${APP_RELEASE:-stable}
platform: ${DOCKER_PLATFORM:-}
pull_policy: if_not_present
@@ -61,7 +70,6 @@ services:
- worker
space:
<<: *app-env
image: ${DOCKERHUB_USER:-makeplane}/plane-space:${APP_RELEASE:-stable}
platform: ${DOCKER_PLATFORM:-}
pull_policy: if_not_present
@@ -75,7 +83,6 @@ services:
- web
admin:
<<: *app-env
image: ${DOCKERHUB_USER:-makeplane}/plane-admin:${APP_RELEASE:-stable}
platform: ${DOCKER_PLATFORM:-}
pull_policy: if_not_present
@@ -88,12 +95,13 @@ services:
- web
live:
<<: *app-env
image: ${DOCKERHUB_USER:-makeplane}/plane-live:${APP_RELEASE:-stable}
platform: ${DOCKER_PLATFORM:-}
pull_policy: if_not_present
restart: unless-stopped
command: node live/dist/server.js live
environment:
<<: [ *live-env ]
deploy:
replicas: ${LIVE_REPLICAS:-1}
depends_on:
@@ -101,7 +109,6 @@ services:
- web
api:
<<: *app-env
image: ${DOCKERHUB_USER:-makeplane}/plane-backend:${APP_RELEASE:-stable}
platform: ${DOCKER_PLATFORM:-}
pull_policy: if_not_present
@@ -111,14 +118,14 @@ services:
replicas: ${API_REPLICAS:-1}
volumes:
- logs_api:/code/plane/logs
environment:
<<: [ *app-env, *db-env, *redis-env, *minio-env, *aws-s3-env, *proxy-env ]
depends_on:
- plane-db
- plane-redis
- plane-mq
worker:
<<: *app-env
image: ${DOCKERHUB_USER:-makeplane}/plane-backend:${APP_RELEASE:-stable}
platform: ${DOCKER_PLATFORM:-}
pull_policy: if_not_present
@@ -126,6 +133,8 @@ services:
command: ./bin/docker-entrypoint-worker.sh
volumes:
- logs_worker:/code/plane/logs
environment:
<<: [ *app-env, *db-env, *redis-env, *minio-env, *aws-s3-env, *proxy-env ]
depends_on:
- api
- plane-db
@@ -133,7 +142,6 @@ services:
- plane-mq
beat-worker:
<<: *app-env
image: ${DOCKERHUB_USER:-makeplane}/plane-backend:${APP_RELEASE:-stable}
platform: ${DOCKER_PLATFORM:-}
pull_policy: if_not_present
@@ -141,6 +149,8 @@ services:
command: ./bin/docker-entrypoint-beat.sh
volumes:
- logs_beat-worker:/code/plane/logs
environment:
<<: [ *app-env, *db-env, *redis-env, *minio-env, *aws-s3-env, *proxy-env ]
depends_on:
- api
- plane-db
@@ -148,7 +158,6 @@ services:
- plane-mq
migrator:
<<: *app-env
image: ${DOCKERHUB_USER:-makeplane}/plane-backend:${APP_RELEASE:-stable}
platform: ${DOCKER_PLATFORM:-}
pull_policy: if_not_present
@@ -156,21 +165,23 @@ services:
command: ./bin/docker-entrypoint-migrator.sh
volumes:
- logs_migrator:/code/plane/logs
environment:
<<: [ *app-env, *db-env, *redis-env, *minio-env, *aws-s3-env, *proxy-env ]
depends_on:
- plane-db
- plane-redis
plane-db:
<<: *app-env
image: postgres:15.7-alpine
pull_policy: if_not_present
restart: unless-stopped
command: postgres -c 'max_connections=1000'
environment:
<<: *db-env
volumes:
- pgdata:/var/lib/postgresql/data
plane-redis:
<<: *app-env
image: valkey/valkey:7.2.5-alpine
pull_policy: if_not_present
restart: unless-stopped
@@ -178,30 +189,33 @@ services:
- redisdata:/data
plane-mq:
<<: *app-env
image: rabbitmq:3.13.6-management-alpine
restart: always
environment:
<<: *mq-env
volumes:
- rabbitmq_data:/var/lib/rabbitmq
plane-minio:
<<: *app-env
image: minio/minio:latest
pull_policy: if_not_present
restart: unless-stopped
command: server /export --console-address ":9090"
environment:
<<: *minio-env
volumes:
- uploads:/export
# Comment this if you already have a reverse proxy running
proxy:
<<: *app-env
image: ${DOCKERHUB_USER:-makeplane}/plane-proxy:${APP_RELEASE:-stable}
platform: ${DOCKER_PLATFORM:-}
pull_policy: if_not_present
restart: unless-stopped
ports:
- ${NGINX_PORT}:80
environment:
<<: *proxy-env
depends_on:
- web
- api

View File

@@ -4,9 +4,12 @@ BRANCH=${BRANCH:-master}
SCRIPT_DIR=$PWD
SERVICE_FOLDER=plane-app
PLANE_INSTALL_DIR=$PWD/$SERVICE_FOLDER
export APP_RELEASE="stable"
export APP_RELEASE=stable
export DOCKERHUB_USER=makeplane
export PULL_POLICY=${PULL_POLICY:-if_not_present}
export GH_REPO=makeplane/plane
export RELEASE_DOWNLOAD_URL="https://github.com/$GH_REPO/releases/download"
export FALLBACK_DOWNLOAD_URL="https://raw.githubusercontent.com/$GH_REPO/$BRANCH/deploy/selfhost"
CPU_ARCH=$(uname -m)
OS_NAME=$(uname)
@@ -16,13 +19,6 @@ mkdir -p $PLANE_INSTALL_DIR/archive
DOCKER_FILE_PATH=$PLANE_INSTALL_DIR/docker-compose.yaml
DOCKER_ENV_PATH=$PLANE_INSTALL_DIR/plane.env
SED_PREFIX=()
if [ "$OS_NAME" == "Darwin" ]; then
SED_PREFIX=("-i" "")
else
SED_PREFIX=("-i")
fi
function print_header() {
clear
@@ -59,6 +55,17 @@ function spinner() {
printf " \b\b\b\b" >&2
}
function checkLatestRelease(){
echo "Checking for the latest release..." >&2
local latest_release=$(curl -s https://api.github.com/repos/$GH_REPO/releases/latest | grep -o '"tag_name": "[^"]*"' | sed 's/"tag_name": "//;s/"//g')
if [ -z "$latest_release" ]; then
echo "Failed to check for the latest release. Exiting..." >&2
exit 1
fi
echo $latest_release
}
function initialize(){
printf "Please wait while we check the availability of Docker images for the selected release ($APP_RELEASE) with ${UPPER_CPU_ARCH} support." >&2
@@ -130,8 +137,12 @@ function updateEnvFile() {
echo "$key=$value" >> "$file"
return
else
# if key exists, update the value
sed "${SED_PREFIX[@]}" "s/^$key=.*/$key=$value/g" "$file"
if [ "$OS_NAME" == "Darwin" ]; then
value=$(echo "$value" | sed 's/|/\\|/g')
sed -i '' "s|^$key=.*|$key=$value|g" "$file"
else
sed -i "s/^$key=.*/$key=$value/g" "$file"
fi
fi
else
echo "File not found: $file"
@@ -182,7 +193,7 @@ function buildYourOwnImage(){
local PLANE_TEMP_CODE_DIR=~/tmp/plane
rm -rf $PLANE_TEMP_CODE_DIR
mkdir -p $PLANE_TEMP_CODE_DIR
REPO=https://github.com/makeplane/plane.git
REPO=https://github.com/$GH_REPO.git
git clone "$REPO" "$PLANE_TEMP_CODE_DIR" --branch "$BRANCH" --single-branch --depth 1
cp "$PLANE_TEMP_CODE_DIR/deploy/selfhost/build.yml" "$PLANE_TEMP_CODE_DIR/build.yml"
@@ -204,6 +215,10 @@ function install() {
echo "Begin Installing Plane"
echo ""
if [ "$APP_RELEASE" == "stable" ]; then
export APP_RELEASE=$(checkLatestRelease)
fi
local build_image=$(initialize)
if [ "$build_image" == "build" ]; then
@@ -232,8 +247,49 @@ function download() {
mv $PLANE_INSTALL_DIR/docker-compose.yaml $PLANE_INSTALL_DIR/archive/$TS.docker-compose.yaml
fi
curl -H 'Cache-Control: no-cache, no-store' -s -o $PLANE_INSTALL_DIR/docker-compose.yaml https://raw.githubusercontent.com/makeplane/plane/$BRANCH/deploy/selfhost/docker-compose.yml?$(date +%s)
curl -H 'Cache-Control: no-cache, no-store' -s -o $PLANE_INSTALL_DIR/variables-upgrade.env https://raw.githubusercontent.com/makeplane/plane/$BRANCH/deploy/selfhost/variables.env?$(date +%s)
RESPONSE=$(curl -H 'Cache-Control: no-cache, no-store' -s -w "HTTPSTATUS:%{http_code}" "$RELEASE_DOWNLOAD_URL/$APP_RELEASE/docker-compose.yml?$(date +%s)")
BODY=$(echo "$RESPONSE" | sed -e 's/HTTPSTATUS\:.*//g')
STATUS=$(echo "$RESPONSE" | tr -d '\n' | sed -e 's/.*HTTPSTATUS://')
if [ "$STATUS" -eq 200 ]; then
echo "$BODY" > $PLANE_INSTALL_DIR/docker-compose.yaml
else
# Fallback to download from the raw github url
RESPONSE=$(curl -H 'Cache-Control: no-cache, no-store' -s -w "HTTPSTATUS:%{http_code}" "$FALLBACK_DOWNLOAD_URL/docker-compose.yml?$(date +%s)")
BODY=$(echo "$RESPONSE" | sed -e 's/HTTPSTATUS\:.*//g')
STATUS=$(echo "$RESPONSE" | tr -d '\n' | sed -e 's/.*HTTPSTATUS://')
if [ "$STATUS" -eq 200 ]; then
echo "$BODY" > $PLANE_INSTALL_DIR/docker-compose.yaml
else
echo "Failed to download docker-compose.yml. HTTP Status: $STATUS"
echo "URL: $RELEASE_DOWNLOAD_URL/$APP_RELEASE/docker-compose.yml"
mv $PLANE_INSTALL_DIR/archive/$TS.docker-compose.yaml $PLANE_INSTALL_DIR/docker-compose.yaml
exit 1
fi
fi
RESPONSE=$(curl -H 'Cache-Control: no-cache, no-store' -s -w "HTTPSTATUS:%{http_code}" "$RELEASE_DOWNLOAD_URL/$APP_RELEASE/variables.env?$(date +%s)")
BODY=$(echo "$RESPONSE" | sed -e 's/HTTPSTATUS\:.*//g')
STATUS=$(echo "$RESPONSE" | tr -d '\n' | sed -e 's/.*HTTPSTATUS://')
if [ "$STATUS" -eq 200 ]; then
echo "$BODY" > $PLANE_INSTALL_DIR/variables-upgrade.env
else
# Fallback to download from the raw github url
RESPONSE=$(curl -H 'Cache-Control: no-cache, no-store' -s -w "HTTPSTATUS:%{http_code}" "$FALLBACK_DOWNLOAD_URL/variables.env?$(date +%s)")
BODY=$(echo "$RESPONSE" | sed -e 's/HTTPSTATUS\:.*//g')
STATUS=$(echo "$RESPONSE" | tr -d '\n' | sed -e 's/.*HTTPSTATUS://')
if [ "$STATUS" -eq 200 ]; then
echo "$BODY" > $PLANE_INSTALL_DIR/variables-upgrade.env
else
echo "Failed to download variables.env. HTTP Status: $STATUS"
echo "URL: $RELEASE_DOWNLOAD_URL/$APP_RELEASE/variables.env"
mv $PLANE_INSTALL_DIR/archive/$TS.docker-compose.yaml $PLANE_INSTALL_DIR/docker-compose.yaml
exit 1
fi
fi
if [ -f "$DOCKER_ENV_PATH" ];
then
@@ -335,6 +391,34 @@ function restartServices() {
startServices
}
function upgrade() {
local latest_release=$(checkLatestRelease)
echo ""
echo "Current release: $APP_RELEASE"
if [ "$latest_release" == "$APP_RELEASE" ]; then
echo ""
echo "You are already using the latest release"
exit 0
fi
echo "Latest release: $latest_release"
echo ""
# Check for confirmation to upgrade
echo "Do you want to upgrade to the latest release ($latest_release)?"
read -p "Continue? [y/N]: " confirm
if [[ ! "$confirm" =~ ^[Yy]$ ]]; then
echo "Exiting..."
exit 0
fi
export APP_RELEASE=$latest_release
echo "Upgrading Plane to the latest release..."
echo ""
echo "***** STOPPING SERVICES ****"
stopServices

View File

@@ -47,9 +47,6 @@ AWS_ACCESS_KEY_ID=access-key
AWS_SECRET_ACCESS_KEY=secret-key
AWS_S3_ENDPOINT_URL=http://plane-minio:9000
AWS_S3_BUCKET_NAME=uploads
MINIO_ROOT_USER=access-key
MINIO_ROOT_PASSWORD=secret-key
BUCKET_NAME=uploads
FILE_SIZE_LIMIT=5242880
# Gunicorn Workers

View File

@@ -1,16 +1,16 @@
{
"name": "live",
"version": "0.24.0",
"version": "0.24.1",
"description": "",
"main": "./src/server.ts",
"private": true,
"type": "module",
"scripts": {
"dev": "concurrently \"babel src --out-dir dist --extensions '.ts,.js' --watch\" \"nodemon dist/server.js\"",
"build": "babel src --out-dir dist --extensions \".ts,.js\"",
"start": "node dist/server.js",
"lint": "eslint . --ext .ts,.tsx",
"dev": "concurrently \"babel src --out-dir dist --extensions '.ts,.js' --watch\" \"nodemon dist/server.js\"",
"lint:errors": "eslint . --ext .ts,.tsx --quiet"
"lint": "eslint src --ext .ts,.tsx",
"lint:errors": "eslint src --ext .ts,.tsx --quiet"
},
"keywords": [],
"author": "",
@@ -30,7 +30,7 @@
"compression": "^1.7.4",
"cors": "^2.8.5",
"dotenv": "^16.4.5",
"express": "^4.20.0",
"express": "^4.21.2",
"express-ws": "^5.0.2",
"helmet": "^7.1.0",
"ioredis": "^5.4.1",

View File

@@ -4,6 +4,10 @@ import { v4 as uuidv4 } from "uuid";
import { handleAuthentication } from "@/core/lib/authentication.js";
// extensions
import { getExtensions } from "@/core/extensions/index.js";
import {
DocumentCollaborativeEvents,
TDocumentEventsServer,
} from "@plane/editor/lib";
// editor types
import { TUserDetails } from "@plane/editor";
// types
@@ -55,6 +59,14 @@ export const getHocusPocusServer = async () => {
throw Error("Authentication unsuccessful!");
}
},
async onStateless({ payload, document }) {
// broadcast the client event (derived from the server event) to all the clients so that they can update their state
const response =
DocumentCollaborativeEvents[payload as TDocumentEventsServer].client;
if (response) {
document.broadcastStateless(response);
}
},
extensions,
debounce: 10000,
});

View File

@@ -1,6 +1,6 @@
{
"repository": "https://github.com/makeplane/plane.git",
"version": "0.24.0",
"version": "0.24.1",
"license": "AGPL-3.0",
"private": true,
"workspaces": [

View File

@@ -1,6 +1,6 @@
{
"name": "@plane/constants",
"version": "0.24.0",
"version": "0.24.1",
"private": true,
"main": "./index.ts"
"main": "./src/index.ts"
}

View File

@@ -0,0 +1,18 @@
export const API_BASE_URL = process.env.NEXT_PUBLIC_API_BASE_URL || "";
// PI Base Url
export const PI_BASE_URL = process.env.NEXT_PUBLIC_PI_BASE_URL || "";
// God Mode Admin App Base Url
export const ADMIN_BASE_URL = process.env.NEXT_PUBLIC_ADMIN_BASE_URL || "";
export const ADMIN_BASE_PATH = process.env.NEXT_PUBLIC_ADMIN_BASE_PATH || "";
export const GOD_MODE_URL = encodeURI(`${ADMIN_BASE_URL}${ADMIN_BASE_PATH}/`);
// Publish App Base Url
export const SPACE_BASE_URL = process.env.NEXT_PUBLIC_SPACE_BASE_URL || "";
export const SPACE_BASE_PATH = process.env.NEXT_PUBLIC_SPACE_BASE_PATH || "";
export const SITES_URL = encodeURI(`${SPACE_BASE_URL}${SPACE_BASE_PATH}/`);
// Live App Base Url
export const LIVE_BASE_URL = process.env.NEXT_PUBLIC_LIVE_BASE_URL || "";
export const LIVE_BASE_PATH = process.env.NEXT_PUBLIC_LIVE_BASE_PATH || "";
export const LIVE_URL = encodeURI(`${LIVE_BASE_URL}${LIVE_BASE_PATH}/`);
// plane website url
export const WEBSITE_URL =
process.env.NEXT_PUBLIC_WEBSITE_URL || "https://plane.so";

View File

@@ -1,3 +1,4 @@
export * from "./auth";
export * from "./endpoints";
export * from "./issue";
export * from "./workspace";

View File

@@ -11,6 +11,7 @@ export enum EIssueGroupByToServerOptions {
"target_date" = "target_date",
"project" = "project_id",
"created_by" = "created_by",
"team_project" = "project_id",
}
export enum EIssueGroupBYServerToProperty {

View File

@@ -0,0 +1,76 @@
export const ORGANIZATION_SIZE = [
"Just myself",
"2-10",
"11-50",
"51-200",
"201-500",
"500+",
];
export const RESTRICTED_URLS = [
"404",
"accounts",
"api",
"create-workspace",
"god-mode",
"installations",
"invitations",
"onboarding",
"profile",
"spaces",
"workspace-invitations",
"password",
"flags",
"monitor",
"monitoring",
"ingest",
"plane-pro",
"plane-ultimate",
"enterprise",
"plane-enterprise",
"disco",
"silo",
"chat",
"calendar",
"drive",
"channels",
"upgrade",
"billing",
"sign-in",
"sign-up",
"signin",
"signup",
"config",
"live",
"admin",
"m",
"import",
"importers",
"integrations",
"integration",
"configuration",
"initiatives",
"initiative",
"config",
"workflow",
"workflows",
"epics",
"epic",
"story",
"mobile",
"dashboard",
"desktop",
"onload",
"real-time",
"one",
"pages",
"mobile",
"business",
"pro",
"settings",
"monitor",
"license",
"licenses",
"instances",
"instance",
];

View File

@@ -1,23 +0,0 @@
export const ORGANIZATION_SIZE = [
"Just myself",
"2-10",
"11-50",
"51-200",
"201-500",
"500+",
];
export const RESTRICTED_URLS = [
"404",
"accounts",
"api",
"create-workspace",
"error",
"god-mode",
"installations",
"invitations",
"onboarding",
"profile",
"spaces",
"workspace-invitations",
];

View File

@@ -1,6 +1,6 @@
{
"name": "@plane/editor",
"version": "0.24.0",
"version": "0.24.1",
"description": "Core Editor that powers Plane",
"private": true,
"main": "./dist/index.mjs",
@@ -27,6 +27,7 @@
"dev": "tsup --watch",
"check-types": "tsc --noEmit",
"lint": "eslint src --ext .ts,.tsx",
"lint:errors": "eslint src --ext .ts,.tsx --quiet",
"format": "prettier --write \"**/*.{ts,tsx,md}\""
},
"peerDependencies": {
@@ -36,8 +37,8 @@
"dependencies": {
"@floating-ui/react": "^0.26.4",
"@hocuspocus/provider": "^2.13.5",
"@plane/helpers": "*",
"@plane/ui": "*",
"@plane/utils": "*",
"@tiptap/core": "^2.1.13",
"@tiptap/extension-blockquote": "^2.1.13",
"@tiptap/extension-character-count": "^2.6.5",

View File

@@ -0,0 +1,12 @@
import { Extensions } from "@tiptap/core";
// types
import { TExtensions } from "@/types";
type Props = {
disabledExtensions: TExtensions[];
};
export const CoreEditorAdditionalExtensions = (props: Props): Extensions => {
const {} = props;
return [];
};

View File

@@ -0,0 +1,2 @@
export * from "./extensions";
export * from "./read-only-extensions";

View File

@@ -0,0 +1,12 @@
import { Extensions } from "@tiptap/core";
// types
import { TExtensions } from "@/types";
type Props = {
disabledExtensions: TExtensions[];
};
export const CoreReadOnlyEditorAdditionalExtensions = (props: Props): Extensions => {
const {} = props;
return [];
};

View File

@@ -0,0 +1,3 @@
import { Extensions } from "@tiptap/core";
export const CoreEditorAdditionalExtensionsWithoutProps: Extensions = [];

View File

@@ -15,7 +15,13 @@ type Props = {
export const DocumentEditorAdditionalExtensions = (_props: Props) => {
const { disabledExtensions } = _props;
const extensions: Extensions = disabledExtensions?.includes("slash-commands") ? [] : [SlashCommands()];
const extensions: Extensions = disabledExtensions?.includes("slash-commands")
? []
: [
SlashCommands({
disabledExtensions,
}),
];
return extensions;
};

View File

@@ -1 +1,3 @@
export * from "./core";
export * from "./document-extensions";
export * from "./slash-commands";

View File

@@ -0,0 +1,14 @@
// extensions
import { TSlashCommandAdditionalOption } from "@/extensions";
// types
import { TExtensions } from "@/types";
type Props = {
disabledExtensions: TExtensions[];
};
export const coreEditorAdditionalSlashCommandOptions = (props: Props): TSlashCommandAdditionalOption[] => {
const {} = props;
const options: TSlashCommandAdditionalOption[] = [];
return options;
};

View File

@@ -15,6 +15,7 @@ import { EditorReadOnlyRefApi, ICollaborativeDocumentReadOnlyEditor } from "@/ty
const CollaborativeDocumentReadOnlyEditor = (props: ICollaborativeDocumentReadOnlyEditor) => {
const {
containerClassName,
disabledExtensions,
displayConfig = DEFAULT_DISPLAY_CONFIG,
editorClassName = "",
embedHandler,
@@ -37,6 +38,7 @@ const CollaborativeDocumentReadOnlyEditor = (props: ICollaborativeDocumentReadOn
}
const { editor, hasServerConnectionFailed, hasServerSynced } = useReadOnlyCollaborativeEditor({
disabledExtensions,
editorClassName,
extensions,
fileHandler,

View File

@@ -10,9 +10,10 @@ import { getEditorClassNames } from "@/helpers/common";
// hooks
import { useReadOnlyEditor } from "@/hooks/use-read-only-editor";
// types
import { EditorReadOnlyRefApi, IMentionHighlight, TDisplayConfig, TFileHandler } from "@/types";
import { EditorReadOnlyRefApi, IMentionHighlight, TDisplayConfig, TExtensions, TFileHandler } from "@/types";
interface IDocumentReadOnlyEditor {
disabledExtensions: TExtensions[];
id: string;
initialValue: string;
containerClassName: string;
@@ -31,6 +32,7 @@ interface IDocumentReadOnlyEditor {
const DocumentReadOnlyEditor = (props: IDocumentReadOnlyEditor) => {
const {
containerClassName,
disabledExtensions,
displayConfig = DEFAULT_DISPLAY_CONFIG,
editorClassName = "",
embedHandler,
@@ -51,6 +53,7 @@ const DocumentReadOnlyEditor = (props: IDocumentReadOnlyEditor) => {
}
const editor = useReadOnlyEditor({
disabledExtensions,
editorClassName,
extensions,
fileHandler,

View File

@@ -19,6 +19,7 @@ export const EditorWrapper: React.FC<Props> = (props) => {
const {
children,
containerClassName,
disabledExtensions,
displayConfig = DEFAULT_DISPLAY_CONFIG,
editorClassName = "",
extensions,
@@ -37,6 +38,7 @@ export const EditorWrapper: React.FC<Props> = (props) => {
} = props;
const editor = useEditor({
disabledExtensions,
editorClassName,
enableHistory: true,
extensions,

View File

@@ -12,6 +12,7 @@ import { IReadOnlyEditorProps } from "@/types";
export const ReadOnlyEditorWrapper = (props: IReadOnlyEditorProps) => {
const {
containerClassName,
disabledExtensions,
displayConfig = DEFAULT_DISPLAY_CONFIG,
editorClassName = "",
fileHandler,
@@ -22,6 +23,7 @@ export const ReadOnlyEditorWrapper = (props: IReadOnlyEditorProps) => {
} = props;
const editor = useReadOnlyEditor({
disabledExtensions,
editorClassName,
fileHandler,
forwardedRef,

View File

@@ -8,12 +8,7 @@ import { SideMenuExtension, SlashCommands } from "@/extensions";
import { EditorRefApi, IRichTextEditor } from "@/types";
const RichTextEditor = (props: IRichTextEditor) => {
const {
disabledExtensions,
dragDropEnabled,
bubbleMenuEnabled = true,
extensions: externalExtensions = [],
} = props;
const { disabledExtensions, dragDropEnabled, bubbleMenuEnabled = true, extensions: externalExtensions = [] } = props;
const getExtensions = useCallback(() => {
const extensions = [
@@ -24,7 +19,11 @@ const RichTextEditor = (props: IRichTextEditor) => {
}),
];
if (!disabledExtensions?.includes("slash-commands")) {
extensions.push(SlashCommands());
extensions.push(
SlashCommands({
disabledExtensions,
})
);
}
return extensions;

View File

@@ -0,0 +1,6 @@
export const DocumentCollaborativeEvents = {
lock: { client: "locked", server: "lock" },
unlock: { client: "unlocked", server: "unlock" },
archive: { client: "archived", server: "archive" },
unarchive: { client: "unarchived", server: "unarchive" },
} as const;

View File

@@ -1,5 +1,5 @@
// plane helpers
import { convertHexEmojiToDecimal } from "@plane/helpers";
import { convertHexEmojiToDecimal } from "@plane/utils";
// plane ui
import { EmojiIconPicker, EmojiIconPickerTypes, Logo, TEmojiLogoProps } from "@plane/ui";
// helpers

View File

@@ -1,5 +1,5 @@
// plane helpers
import { sanitizeHTML } from "@plane/helpers";
import { sanitizeHTML } from "@plane/utils";
// plane ui
import { TEmojiLogoProps } from "@plane/ui";
// types

View File

@@ -19,6 +19,8 @@ import { TableHeader, TableCell, TableRow, Table } from "./table";
import { CustomTextAlignExtension } from "./text-align";
import { CustomCalloutExtensionConfig } from "./callout/extension-config";
import { CustomColorExtension } from "./custom-color";
// plane editor extensions
import { CoreEditorAdditionalExtensionsWithoutProps } from "@/plane-editor/extensions/core/without-props";
export const CoreEditorExtensionsWithoutProps = [
StarterKit.configure({
@@ -41,6 +43,16 @@ export const CoreEditorExtensionsWithoutProps = [
codeBlock: false,
horizontalRule: false,
blockquote: false,
paragraph: {
HTMLAttributes: {
class: "editor-paragraph-block",
},
},
heading: {
HTMLAttributes: {
class: "editor-heading-block",
},
},
dropcursor: false,
}),
CustomQuoteExtension,
@@ -89,6 +101,7 @@ export const CoreEditorExtensionsWithoutProps = [
CustomTextAlignExtension,
CustomCalloutExtensionConfig,
CustomColorExtension,
...CoreEditorAdditionalExtensionsWithoutProps,
];
export const DocumentEditorExtensionsWithoutProps = [IssueWidgetWithoutProps()];

View File

@@ -118,7 +118,6 @@ export const CustomImageBlock: React.FC<CustomImageBlockProps> = (props) => {
height: `${Math.round(initialHeight)}px` satisfies Pixel,
aspectRatio: aspectRatioCalculated,
};
setSize(initialComputedSize);
updateAttributesSafely(
initialComputedSize,

View File

@@ -29,12 +29,9 @@ export const CustomImageNode = (props: CustomImageNodeProps) => {
useEffect(() => {
const closestEditorContainer = imageComponentRef.current?.closest(".editor-container");
if (!closestEditorContainer) {
console.error("Editor container not found");
return;
if (closestEditorContainer) {
setEditorContainer(closestEditorContainer as HTMLDivElement);
}
setEditorContainer(closestEditorContainer as HTMLDivElement);
}, []);
// the image is already uploaded if the image-component node has src attribute
@@ -55,7 +52,7 @@ export const CustomImageNode = (props: CustomImageNodeProps) => {
setResolvedSrc(url as string);
};
getImageSource();
}, [imageFromFileSystem, node.attrs.src]);
}, [imgNodeSrc]);
return (
<NodeViewWrapper>

View File

@@ -1,11 +1,9 @@
import { Editor, mergeAttributes } from "@tiptap/core";
import { Image } from "@tiptap/extension-image";
import { MarkdownSerializerState } from "@tiptap/pm/markdown";
import { Node } from "@tiptap/pm/model";
import { ReactNodeViewRenderer } from "@tiptap/react";
import { v4 as uuidv4 } from "uuid";
// extensions
import { CustomImageNode, ImageAttributes } from "@/extensions/custom-image";
import { CustomImageNode } from "@/extensions/custom-image";
// plugins
import { TrackImageDeletionPlugin, TrackImageRestorationPlugin, isFileValid } from "@/plugins/image";
// types
@@ -126,14 +124,9 @@ export const CustomImageExtension = (props: TFileHandler) => {
deletedImageSet: new Map<string, boolean>(),
uploadInProgress: false,
maxFileSize,
// escape markdown for images
markdown: {
serialize(state: MarkdownSerializerState, node: Node) {
const attrs = node.attrs as ImageAttributes;
const imageSource = state.esc(this?.editor?.commands?.getImageSource?.(attrs.src) || attrs.src);
const imageWidth = state.esc(attrs.width?.toString());
state.write(`<img src="${state.esc(imageSource)}" width="${imageWidth}" />`);
state.closeBlock(node);
},
serialize() {},
},
};
},

View File

@@ -1,10 +1,8 @@
import { mergeAttributes } from "@tiptap/core";
import { Image } from "@tiptap/extension-image";
import { MarkdownSerializerState } from "@tiptap/pm/markdown";
import { Node } from "@tiptap/pm/model";
import { ReactNodeViewRenderer } from "@tiptap/react";
// components
import { CustomImageNode, ImageAttributes, UploadImageExtensionStorage } from "@/extensions/custom-image";
import { CustomImageNode, UploadImageExtensionStorage } from "@/extensions/custom-image";
// types
import { TFileHandler } from "@/types";
@@ -54,14 +52,9 @@ export const CustomReadOnlyImageExtension = (props: Pick<TFileHandler, "getAsset
addStorage() {
return {
fileMap: new Map(),
// escape markdown for images
markdown: {
serialize(state: MarkdownSerializerState, node: Node) {
const attrs = node.attrs as ImageAttributes;
const imageSource = state.esc(this?.editor?.commands?.getImageSource?.(attrs.src) || attrs.src);
const imageWidth = state.esc(attrs.width?.toString());
state.write(`<img src="${state.esc(imageSource)}" width="${imageWidth}" />`);
state.closeBlock(node);
},
serialize() {},
},
};
},

View File

@@ -1,3 +1,4 @@
import { Extensions } from "@tiptap/core";
import CharacterCount from "@tiptap/extension-character-count";
import Placeholder from "@tiptap/extension-placeholder";
import TaskItem from "@tiptap/extension-task-item";
@@ -32,9 +33,12 @@ import {
// helpers
import { isValidHttpUrl } from "@/helpers/common";
// types
import { IMentionHighlight, IMentionSuggestion, TFileHandler } from "@/types";
import { IMentionHighlight, IMentionSuggestion, TExtensions, TFileHandler } from "@/types";
// plane editor extensions
import { CoreEditorAdditionalExtensions } from "@/plane-editor/extensions";
type TArguments = {
disabledExtensions: TExtensions[];
enableHistory: boolean;
fileHandler: TFileHandler;
mentionConfig: {
@@ -45,8 +49,8 @@ type TArguments = {
tabIndex?: number;
};
export const CoreEditorExtensions = (args: TArguments) => {
const { enableHistory, fileHandler, mentionConfig, placeholder, tabIndex } = args;
export const CoreEditorExtensions = (args: TArguments): Extensions => {
const { disabledExtensions, enableHistory, fileHandler, mentionConfig, placeholder, tabIndex } = args;
return [
StarterKit.configure({
@@ -69,6 +73,16 @@ export const CoreEditorExtensions = (args: TArguments) => {
codeBlock: false,
horizontalRule: false,
blockquote: false,
paragraph: {
HTMLAttributes: {
class: "editor-paragraph-block",
},
},
heading: {
HTMLAttributes: {
class: "editor-heading-block",
},
},
dropcursor: {
class: "text-custom-text-300",
},
@@ -162,5 +176,8 @@ export const CoreEditorExtensions = (args: TArguments) => {
CustomTextAlignExtension,
CustomCalloutExtension,
CustomColorExtension,
...CoreEditorAdditionalExtensions({
disabledExtensions,
}),
];
};

View File

@@ -1,3 +1,4 @@
import { Extensions } from "@tiptap/core";
import CharacterCount from "@tiptap/extension-character-count";
import TaskItem from "@tiptap/extension-task-item";
import TaskList from "@tiptap/extension-task-list";
@@ -28,17 +29,20 @@ import {
// helpers
import { isValidHttpUrl } from "@/helpers/common";
// types
import { IMentionHighlight, TFileHandler } from "@/types";
import { IMentionHighlight, TExtensions, TFileHandler } from "@/types";
// plane editor extensions
import { CoreReadOnlyEditorAdditionalExtensions } from "@/plane-editor/extensions";
type Props = {
disabledExtensions: TExtensions[];
fileHandler: Pick<TFileHandler, "getAssetSrc">;
mentionConfig: {
mentionHighlights?: () => Promise<IMentionHighlight[]>;
};
};
export const CoreReadOnlyEditorExtensions = (props: Props) => {
const { fileHandler, mentionConfig } = props;
export const CoreReadOnlyEditorExtensions = (props: Props): Extensions => {
const { disabledExtensions, fileHandler, mentionConfig } = props;
return [
StarterKit.configure({
@@ -61,6 +65,16 @@ export const CoreReadOnlyEditorExtensions = (props: Props) => {
codeBlock: false,
horizontalRule: false,
blockquote: false,
paragraph: {
HTMLAttributes: {
class: "editor-paragraph-block",
},
},
heading: {
HTMLAttributes: {
class: "editor-heading-block",
},
},
dropcursor: false,
gapcursor: false,
}),
@@ -128,5 +142,8 @@ export const CoreReadOnlyEditorExtensions = (props: Props) => {
HeadingListExtension,
CustomTextAlignExtension,
CustomCalloutReadOnlyExtension,
...CoreReadOnlyEditorAdditionalExtensions({
disabledExtensions,
}),
];
};

View File

@@ -39,17 +39,27 @@ import {
setText,
} from "@/helpers/editor-commands";
// types
import { CommandProps, ISlashCommandItem } from "@/types";
import { CommandProps, ISlashCommandItem, TExtensions, TSlashCommandSectionKeys } from "@/types";
// plane editor extensions
import { coreEditorAdditionalSlashCommandOptions } from "@/plane-editor/extensions";
// local types
import { TSlashCommandAdditionalOption } from "./root";
export type TSlashCommandSection = {
key: string;
key: TSlashCommandSectionKeys;
title?: string;
items: ISlashCommandItem[];
};
type TArgs = {
additionalOptions?: TSlashCommandAdditionalOption[];
disabledExtensions: TExtensions[];
};
export const getSlashCommandFilteredSections =
(additionalOptions?: ISlashCommandItem[]) =>
(args: TArgs) =>
({ query }: { query: string }): TSlashCommandSection[] => {
const { additionalOptions, disabledExtensions } = args;
const SLASH_COMMAND_SECTIONS: TSlashCommandSection[] = [
{
key: "general",
@@ -201,7 +211,7 @@ export const getSlashCommandFilteredSections =
],
},
{
key: "text-color",
key: "text-colors",
title: "Colors",
items: [
{
@@ -242,7 +252,7 @@ export const getSlashCommandFilteredSections =
],
},
{
key: "background-color",
key: "background-colors",
title: "Background colors",
items: [
{
@@ -279,8 +289,19 @@ export const getSlashCommandFilteredSections =
},
];
additionalOptions?.map((item) => {
SLASH_COMMAND_SECTIONS?.[0]?.items.push(item);
[
...(additionalOptions ?? []),
...coreEditorAdditionalSlashCommandOptions({
disabledExtensions,
}),
]?.forEach((item) => {
const sectionToPushTo = SLASH_COMMAND_SECTIONS.find((s) => s.key === item.section) ?? SLASH_COMMAND_SECTIONS[0];
const itemIndexToPushAfter = sectionToPushTo.items.findIndex((i) => i.commandKey === item.pushAfter);
if (itemIndexToPushAfter !== -1) {
sectionToPushTo.items.splice(itemIndexToPushAfter + 1, 0, item);
} else {
sectionToPushTo.items.push(item);
}
});
const filteredSlashSections = SLASH_COMMAND_SECTIONS.map((section) => ({

View File

@@ -41,7 +41,7 @@ export const SlashCommandsMenu = (props: SlashCommandsMenuProps) => {
if (nextItem < 0) {
nextSection = currentSection - 1;
if (nextSection < 0) nextSection = sections.length - 1;
nextItem = sections[nextSection].items.length - 1;
nextItem = sections[nextSection]?.items.length - 1;
}
}
if (e.key === "ArrowDown") {

View File

@@ -3,7 +3,7 @@ import { ReactRenderer } from "@tiptap/react";
import Suggestion, { SuggestionOptions } from "@tiptap/suggestion";
import tippy from "tippy.js";
// types
import { ISlashCommandItem } from "@/types";
import { ISlashCommandItem, TEditorCommands, TExtensions, TSlashCommandSectionKeys } from "@/types";
// components
import { getSlashCommandFilteredSections } from "./command-items-list";
import { SlashCommandsMenu, SlashCommandsMenuProps } from "./command-menu";
@@ -12,6 +12,11 @@ export type SlashCommandOptions = {
suggestion: Omit<SuggestionOptions, "editor">;
};
export type TSlashCommandAdditionalOption = ISlashCommandItem & {
section: TSlashCommandSectionKeys;
pushAfter: TEditorCommands;
};
const Command = Extension.create<SlashCommandOptions>({
name: "slash-command",
addOptions() {
@@ -102,10 +107,15 @@ const renderItems = () => {
};
};
export const SlashCommands = (additionalOptions?: ISlashCommandItem[]) =>
type TExtensionProps = {
additionalOptions?: TSlashCommandAdditionalOption[];
disabledExtensions: TExtensions[];
};
export const SlashCommands = (props: TExtensionProps) =>
Command.configure({
suggestion: {
items: getSlashCommandFilteredSections(additionalOptions),
items: getSlashCommandFilteredSections(props),
render: renderItems,
},
});

View File

@@ -0,0 +1,11 @@
import { DocumentCollaborativeEvents } from "@/constants/document-collaborative-events";
import { TDocumentEventKey, TDocumentEventsClient, TDocumentEventsServer } from "@/types/document-collaborative-events";
export const getServerEventName = (clientEvent: TDocumentEventsClient): TDocumentEventsServer | undefined => {
for (const key in DocumentCollaborativeEvents) {
if (DocumentCollaborativeEvents[key as TDocumentEventKey].client === clientEvent) {
return DocumentCollaborativeEvents[key as TDocumentEventKey].server;
}
}
return undefined;
};

View File

@@ -1,4 +1,4 @@
import { useEffect, useLayoutEffect, useMemo, useState } from "react";
import { useEffect, useMemo, useState } from "react";
import { HocuspocusProvider } from "@hocuspocus/provider";
import Collaboration from "@tiptap/extension-collaboration";
import { IndexeddbPersistence } from "y-indexeddb";
@@ -58,23 +58,22 @@ export const useCollaborativeEditor = (props: TCollaborativeEditorProps) => {
[id, realtimeConfig, serverHandler, user]
);
// destroy and disconnect connection on unmount
const localProvider = useMemo(
() => (id ? new IndexeddbPersistence(id, provider.document) : undefined),
[id, provider]
);
// destroy and disconnect all providers connection on unmount
useEffect(
() => () => {
provider.destroy();
provider.disconnect();
},
[provider]
);
// indexed db integration for offline support
useLayoutEffect(() => {
const localProvider = new IndexeddbPersistence(id, provider.document);
return () => {
provider?.destroy();
localProvider?.destroy();
};
}, [provider, id]);
},
[provider, localProvider]
);
const editor = useEditor({
disabledExtensions,
id,
onTransaction,
editorProps,

View File

@@ -16,12 +16,21 @@ import { IMarking, scrollSummary, scrollToNodeViaDOMCoordinates } from "@/helper
// props
import { CoreEditorProps } from "@/props";
// types
import { EditorRefApi, IMentionHighlight, IMentionSuggestion, TEditorCommands, TFileHandler } from "@/types";
import type {
TDocumentEventsServer,
EditorRefApi,
IMentionHighlight,
IMentionSuggestion,
TEditorCommands,
TFileHandler,
TExtensions,
} from "@/types";
export interface CustomEditorProps {
editorClassName: string;
editorProps?: EditorProps;
enableHistory: boolean;
disabledExtensions: TExtensions[];
extensions?: any;
fileHandler: TFileHandler;
forwardedRef?: MutableRefObject<EditorRefApi | null>;
@@ -45,6 +54,7 @@ export interface CustomEditorProps {
export const useEditor = (props: CustomEditorProps) => {
const {
disabledExtensions,
editorClassName,
editorProps = {},
enableHistory,
@@ -58,9 +68,9 @@ export const useEditor = (props: CustomEditorProps) => {
onChange,
onTransaction,
placeholder,
provider,
tabIndex,
value,
provider,
autofocus = false,
} = props;
// states
@@ -79,6 +89,7 @@ export const useEditor = (props: CustomEditorProps) => {
},
extensions: [
...CoreEditorExtensions({
disabledExtensions,
enableHistory,
fileHandler,
mentionConfig: {
@@ -247,7 +258,7 @@ export const useEditor = (props: CustomEditorProps) => {
if (empty) return null;
const nodesArray: string[] = [];
state.doc.nodesBetween(from, to, (node, pos, parent) => {
state.doc.nodesBetween(from, to, (node, _pos, parent) => {
if (parent === state.doc && editorRef.current) {
const serializer = DOMSerializer.fromSchema(editorRef.current?.schema);
const dom = serializer.serializeNode(node);
@@ -288,6 +299,8 @@ export const useEditor = (props: CustomEditorProps) => {
if (!document) return;
Y.applyUpdate(document, value);
},
emitRealTimeUpdate: (message: TDocumentEventsServer) => provider?.sendStateless(message),
listenToRealTimeUpdate: () => provider && { on: provider.on.bind(provider), off: provider.off.bind(provider) },
}),
[editorRef, savedSelection]
);

View File

@@ -1,4 +1,4 @@
import { useEffect, useLayoutEffect, useMemo, useState } from "react";
import { useEffect, useMemo, useState } from "react";
import { HocuspocusProvider } from "@hocuspocus/provider";
import Collaboration from "@tiptap/extension-collaboration";
import { IndexeddbPersistence } from "y-indexeddb";
@@ -11,6 +11,7 @@ import { TReadOnlyCollaborativeEditorProps } from "@/types";
export const useReadOnlyCollaborativeEditor = (props: TReadOnlyCollaborativeEditorProps) => {
const {
disabledExtensions,
editorClassName,
editorProps = {},
extensions,
@@ -30,8 +31,8 @@ export const useReadOnlyCollaborativeEditor = (props: TReadOnlyCollaborativeEdit
const provider = useMemo(
() =>
new HocuspocusProvider({
url: realtimeConfig.url,
name: id,
url: realtimeConfig.url,
token: JSON.stringify(user),
parameters: realtimeConfig.queryParams,
onAuthenticationFailed: () => {
@@ -47,25 +48,26 @@ export const useReadOnlyCollaborativeEditor = (props: TReadOnlyCollaborativeEdit
},
onSynced: () => setHasServerSynced(true),
}),
[id, realtimeConfig, user]
[id, realtimeConfig, serverHandler, user]
);
// indexed db integration for offline support
const localProvider = useMemo(
() => (id ? new IndexeddbPersistence(id, provider.document) : undefined),
[id, provider]
);
// destroy and disconnect connection on unmount
useEffect(
() => () => {
provider.destroy();
provider.disconnect();
},
[provider]
);
// indexed db integration for offline support
useLayoutEffect(() => {
const localProvider = new IndexeddbPersistence(id, provider.document);
return () => {
localProvider?.destroy();
};
}, [provider, id]);
},
[provider, localProvider]
);
const editor = useReadOnlyEditor({
disabledExtensions,
editorProps,
editorClassName,
extensions: [

View File

@@ -11,14 +11,21 @@ import { IMarking, scrollSummary } from "@/helpers/scroll-to-node";
// props
import { CoreReadOnlyEditorProps } from "@/props";
// types
import { EditorReadOnlyRefApi, IMentionHighlight, TFileHandler } from "@/types";
import type {
EditorReadOnlyRefApi,
IMentionHighlight,
TExtensions,
TDocumentEventsServer,
TFileHandler,
} from "@/types";
interface CustomReadOnlyEditorProps {
initialValue?: string;
disabledExtensions: TExtensions[];
editorClassName: string;
forwardedRef?: MutableRefObject<EditorReadOnlyRefApi | null>;
extensions?: any;
editorProps?: EditorProps;
extensions?: any;
forwardedRef?: MutableRefObject<EditorReadOnlyRefApi | null>;
initialValue?: string;
fileHandler: Pick<TFileHandler, "getAssetSrc">;
handleEditorReady?: (value: boolean) => void;
mentionHandler: {
@@ -29,6 +36,7 @@ interface CustomReadOnlyEditorProps {
export const useReadOnlyEditor = (props: CustomReadOnlyEditorProps) => {
const {
disabledExtensions,
initialValue,
editorClassName,
forwardedRef,
@@ -54,6 +62,7 @@ export const useReadOnlyEditor = (props: CustomReadOnlyEditorProps) => {
},
extensions: [
...CoreReadOnlyEditorExtensions({
disabledExtensions,
mentionConfig: {
mentionHighlights: mentionHandler.highlights,
},
@@ -117,6 +126,8 @@ export const useReadOnlyEditor = (props: CustomReadOnlyEditorProps) => {
editorRef.current?.off("update");
};
},
emitRealTimeUpdate: (message: TDocumentEventsServer) => provider?.sendStateless(message),
listenToRealTimeUpdate: () => provider && { on: provider.on.bind(provider), off: provider.off.bind(provider) },
getHeadings: () => editorRef?.current?.storage.headingList.headings,
}));

Some files were not shown because too many files have changed in this diff Show More