mirror of
https://github.com/makeplane/plane
synced 2025-08-07 19:59:33 +00:00
Compare commits
104 Commits
fix-docker
...
chore-404-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7a46ac192d | ||
|
|
f278a284c4 | ||
|
|
2bcf6c76cd | ||
|
|
fb3e022042 | ||
|
|
e3fbb7b073 | ||
|
|
cce6dd581c | ||
|
|
d86ac368a4 | ||
|
|
101994840a | ||
|
|
f60f57ef11 | ||
|
|
546217f09b | ||
|
|
6df8323665 | ||
|
|
77d022df71 | ||
|
|
797f150ec4 | ||
|
|
b54f54999e | ||
|
|
dff176be8f | ||
|
|
2bbaaed3ea | ||
|
|
b5ceb94fb2 | ||
|
|
feb6243065 | ||
|
|
5dacba74c9 | ||
|
|
0efb0c239c | ||
|
|
c8be836d6c | ||
|
|
833b82e247 | ||
|
|
280aa7f671 | ||
|
|
eac1115566 | ||
|
|
8166a757a7 | ||
|
|
be5d77d978 | ||
|
|
18fb3b8450 | ||
|
|
ef5616905e | ||
|
|
aeb41e603c | ||
|
|
55eea1a8b7 | ||
|
|
fa87ff14b7 | ||
|
|
7d91b5f8df | ||
|
|
3ce40dfa2f | ||
|
|
f65253c994 | ||
|
|
97fcfaa653 | ||
|
|
0e1ebff978 | ||
|
|
642dabfe35 | ||
|
|
48557cb670 | ||
|
|
608da1465c | ||
|
|
dbcc7bedb4 | ||
|
|
c401b26dd4 | ||
|
|
a4bca0c39c | ||
|
|
24899887b2 | ||
|
|
c6953ff878 | ||
|
|
06be9ab81b | ||
|
|
ed8d00acb1 | ||
|
|
915e374485 | ||
|
|
1d5b93cebd | ||
|
|
df65b8c34a | ||
|
|
4c688b1d25 | ||
|
|
bfc6ed839f | ||
|
|
b68396a4b2 | ||
|
|
b4fc715aba | ||
|
|
33a1b916cb | ||
|
|
2818310619 | ||
|
|
882520b3c7 | ||
|
|
20132e7544 | ||
|
|
0ae57b49d2 | ||
|
|
d347269afb | ||
|
|
a3fd616ec4 | ||
|
|
9eeff158d5 | ||
|
|
ef20b5814e | ||
|
|
14914e8716 | ||
|
|
b738e39a4a | ||
|
|
993c7899b6 | ||
|
|
2b411de1e3 | ||
|
|
1f9222065e | ||
|
|
670134562f | ||
|
|
144c793e9e | ||
|
|
0a924e4824 | ||
|
|
08702a5381 | ||
|
|
270f282c3c | ||
|
|
37699362ad | ||
|
|
27cec64c56 | ||
|
|
782b09eeaf | ||
|
|
5ac5892fe5 | ||
|
|
96c403ff0b | ||
|
|
543552f492 | ||
|
|
c3cfcc1b92 | ||
|
|
ac84d6ecf0 | ||
|
|
475b7a8396 | ||
|
|
00f78bd6a1 | ||
|
|
34337f90c1 | ||
|
|
4f68aaafa6 | ||
|
|
9c10235fca | ||
|
|
9c1b158291 | ||
|
|
2d0a15efd6 | ||
|
|
d62ac6269b | ||
|
|
d9e3405f5a | ||
|
|
adee686ea3 | ||
|
|
81fae36c23 | ||
|
|
3f652ba44e | ||
|
|
16aa1d7034 | ||
|
|
0db581509c | ||
|
|
523ab3f4a1 | ||
|
|
a57c37c26c | ||
|
|
65a0530cfe | ||
|
|
7bb291408d | ||
|
|
4be94adaca | ||
|
|
2d1b3fb39e | ||
|
|
585432824f | ||
|
|
fe9640533c | ||
|
|
5ec817ba37 | ||
|
|
9279b5f1fb |
2
.github/workflows/build-branch.yml
vendored
2
.github/workflows/build-branch.yml
vendored
@@ -273,7 +273,7 @@ jobs:
|
||||
run: |
|
||||
cp ./deploy/selfhost/install.sh deploy/selfhost/setup.sh
|
||||
sed -i 's/${APP_RELEASE:-stable}/${APP_RELEASE:-'${REL_VERSION}'}/g' deploy/selfhost/docker-compose.yml
|
||||
sed -i 's/APP_RELEASE=stable/APP_RELEASE='${REL_VERSION}'/g' deploy/selfhost/variables.env
|
||||
# sed -i 's/APP_RELEASE=stable/APP_RELEASE='${REL_VERSION}'/g' deploy/selfhost/variables.env
|
||||
|
||||
- name: Create Release
|
||||
id: create_release
|
||||
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -88,6 +88,7 @@ deploy/selfhost/plane-app/
|
||||
*storybook.log
|
||||
output.css
|
||||
|
||||
dev-editor
|
||||
# Redis
|
||||
*.rdb
|
||||
*.rdb.gz
|
||||
*.rdb.gz
|
||||
|
||||
@@ -15,6 +15,22 @@ Without said minimal reproduction, we won't be able to investigate all [issues](
|
||||
|
||||
You can open a new issue with this [issue form](https://github.com/makeplane/plane/issues/new).
|
||||
|
||||
### Naming conventions for issues
|
||||
|
||||
When opening a new issue, please use a clear and concise title that follows this format:
|
||||
|
||||
- For bugs: `🐛 Bug: [short description]`
|
||||
- For features: `🚀 Feature: [short description]`
|
||||
- For improvements: `🛠️ Improvement: [short description]`
|
||||
- For documentation: `📘 Docs: [short description]`
|
||||
|
||||
**Examples:**
|
||||
- `🐛 Bug: API token expiry time not saving correctly`
|
||||
- `📘 Docs: Clarify RAM requirement for local setup`
|
||||
- `🚀 Feature: Allow custom time selection for token expiration`
|
||||
|
||||
This helps us triage and manage issues more efficiently.
|
||||
|
||||
## Projects setup and Architecture
|
||||
|
||||
### Requirements
|
||||
@@ -23,6 +39,8 @@ You can open a new issue with this [issue form](https://github.com/makeplane/pla
|
||||
- Python version 3.8+
|
||||
- Postgres version v14
|
||||
- Redis version v6.2.7
|
||||
- **Memory**: Minimum **12 GB RAM** recommended
|
||||
> ⚠️ Running the project on a system with only 8 GB RAM may lead to setup failures or memory crashes (especially during Docker container build/start or dependency install). Use cloud environments like GitHub Codespaces or upgrade local RAM if possible.
|
||||
|
||||
### Setup the project
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "admin",
|
||||
"description": "Admin UI for Plane",
|
||||
"version": "0.25.3",
|
||||
"version": "0.26.0",
|
||||
"license": "AGPL-3.0",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
@@ -29,7 +29,7 @@
|
||||
"lucide-react": "^0.469.0",
|
||||
"mobx": "^6.12.0",
|
||||
"mobx-react": "^9.1.1",
|
||||
"next": "^14.2.25",
|
||||
"next": "^14.2.26",
|
||||
"next-themes": "^0.2.1",
|
||||
"postcss": "^8.4.38",
|
||||
"react": "^18.3.1",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "plane-api",
|
||||
"version": "0.25.3",
|
||||
"version": "0.26.0",
|
||||
"license": "AGPL-3.0",
|
||||
"private": true,
|
||||
"description": "API server powering Plane's backend"
|
||||
|
||||
@@ -39,7 +39,15 @@ class CycleSerializer(BaseSerializer):
|
||||
data.get("start_date", None) is not None
|
||||
and data.get("end_date", None) is not None
|
||||
):
|
||||
project_id = self.initial_data.get("project_id") or self.instance.project_id
|
||||
project_id = self.initial_data.get("project_id") or (
|
||||
self.instance.project_id
|
||||
if self.instance and hasattr(self.instance, "project_id")
|
||||
else None
|
||||
)
|
||||
|
||||
if not project_id:
|
||||
raise serializers.ValidationError("Project ID is required")
|
||||
|
||||
is_start_date_end_date_equal = (
|
||||
True
|
||||
if str(data.get("start_date")) == str(data.get("end_date"))
|
||||
|
||||
@@ -16,7 +16,6 @@ class ProjectSerializer(BaseSerializer):
|
||||
member_role = serializers.IntegerField(read_only=True)
|
||||
is_deployed = serializers.BooleanField(read_only=True)
|
||||
cover_image_url = serializers.CharField(read_only=True)
|
||||
inbox_view = serializers.BooleanField(read_only=True, source="intake_view")
|
||||
|
||||
class Meta:
|
||||
model = Project
|
||||
|
||||
@@ -4,16 +4,6 @@ from plane.api.views import IntakeIssueAPIEndpoint
|
||||
|
||||
|
||||
urlpatterns = [
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/",
|
||||
IntakeIssueAPIEndpoint.as_view(),
|
||||
name="inbox-issue",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/<uuid:issue_id>/",
|
||||
IntakeIssueAPIEndpoint.as_view(),
|
||||
name="inbox-issue",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/intake-issues/",
|
||||
IntakeIssueAPIEndpoint.as_view(),
|
||||
|
||||
@@ -39,7 +39,7 @@ from plane.db.models import (
|
||||
UserFavorite,
|
||||
)
|
||||
from plane.utils.analytics_plot import burndown_plot
|
||||
|
||||
from plane.utils.host import base_host
|
||||
from .base import BaseAPIView
|
||||
from plane.bgtasks.webhook_task import model_activity
|
||||
|
||||
@@ -141,8 +141,10 @@ class CycleAPIEndpoint(BaseAPIView):
|
||||
if pk:
|
||||
queryset = self.get_queryset().filter(archived_at__isnull=True).get(pk=pk)
|
||||
data = CycleSerializer(
|
||||
queryset, fields=self.fields,
|
||||
expand=self.expand, context={"project": project}
|
||||
queryset,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
context={"project": project},
|
||||
).data
|
||||
return Response(data, status=status.HTTP_200_OK)
|
||||
queryset = self.get_queryset().filter(archived_at__isnull=True)
|
||||
@@ -154,8 +156,11 @@ class CycleAPIEndpoint(BaseAPIView):
|
||||
start_date__lte=timezone.now(), end_date__gte=timezone.now()
|
||||
)
|
||||
data = CycleSerializer(
|
||||
queryset, many=True, fields=self.fields,
|
||||
expand=self.expand, context={"project": project}
|
||||
queryset,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
context={"project": project},
|
||||
).data
|
||||
return Response(data, status=status.HTTP_200_OK)
|
||||
|
||||
@@ -166,8 +171,11 @@ class CycleAPIEndpoint(BaseAPIView):
|
||||
request=request,
|
||||
queryset=(queryset),
|
||||
on_results=lambda cycles: CycleSerializer(
|
||||
cycles, many=True, fields=self.fields,
|
||||
expand=self.expand, context={"project": project}
|
||||
cycles,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
context={"project": project},
|
||||
).data,
|
||||
)
|
||||
|
||||
@@ -178,8 +186,11 @@ class CycleAPIEndpoint(BaseAPIView):
|
||||
request=request,
|
||||
queryset=(queryset),
|
||||
on_results=lambda cycles: CycleSerializer(
|
||||
cycles, many=True, fields=self.fields,
|
||||
expand=self.expand, context={"project": project}
|
||||
cycles,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
context={"project": project},
|
||||
).data,
|
||||
)
|
||||
|
||||
@@ -190,8 +201,11 @@ class CycleAPIEndpoint(BaseAPIView):
|
||||
request=request,
|
||||
queryset=(queryset),
|
||||
on_results=lambda cycles: CycleSerializer(
|
||||
cycles, many=True, fields=self.fields,
|
||||
expand=self.expand, context={"project": project}
|
||||
cycles,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
context={"project": project},
|
||||
).data,
|
||||
)
|
||||
|
||||
@@ -204,16 +218,22 @@ class CycleAPIEndpoint(BaseAPIView):
|
||||
request=request,
|
||||
queryset=(queryset),
|
||||
on_results=lambda cycles: CycleSerializer(
|
||||
cycles, many=True, fields=self.fields,
|
||||
expand=self.expand, context={"project": project}
|
||||
cycles,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
context={"project": project},
|
||||
).data,
|
||||
)
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=(queryset),
|
||||
on_results=lambda cycles: CycleSerializer(
|
||||
cycles, many=True, fields=self.fields,
|
||||
expand=self.expand, context={"project": project}
|
||||
cycles,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
expand=self.expand,
|
||||
context={"project": project},
|
||||
).data,
|
||||
)
|
||||
|
||||
@@ -259,7 +279,7 @@ class CycleAPIEndpoint(BaseAPIView):
|
||||
current_instance=None,
|
||||
actor_id=request.user.id,
|
||||
slug=slug,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
@@ -331,7 +351,7 @@ class CycleAPIEndpoint(BaseAPIView):
|
||||
current_instance=current_instance,
|
||||
actor_id=request.user.id,
|
||||
slug=slug,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
@@ -702,7 +722,7 @@ class CycleIssueAPIEndpoint(BaseAPIView):
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
# Return all Cycle Issues
|
||||
return Response(
|
||||
@@ -1176,7 +1196,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
|
||||
return Response({"message": "Success"}, status=status.HTTP_200_OK)
|
||||
|
||||
@@ -18,8 +18,9 @@ from plane.api.serializers import IntakeIssueSerializer, IssueSerializer
|
||||
from plane.app.permissions import ProjectLitePermission
|
||||
from plane.bgtasks.issue_activities_task import issue_activity
|
||||
from plane.db.models import Intake, IntakeIssue, Issue, Project, ProjectMember, State
|
||||
|
||||
from plane.utils.host import base_host
|
||||
from .base import BaseAPIView
|
||||
from plane.db.models.intake import SourceType
|
||||
|
||||
|
||||
class IntakeIssueAPIEndpoint(BaseAPIView):
|
||||
@@ -125,7 +126,7 @@ class IntakeIssueAPIEndpoint(BaseAPIView):
|
||||
intake_id=intake.id,
|
||||
project_id=project_id,
|
||||
issue=issue,
|
||||
source=request.data.get("source", "IN-APP"),
|
||||
source=SourceType.IN_APP,
|
||||
)
|
||||
# Create an Issue Activity
|
||||
issue_activity.delay(
|
||||
@@ -297,7 +298,7 @@ class IntakeIssueAPIEndpoint(BaseAPIView):
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=False,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
intake=str(intake_issue.id),
|
||||
)
|
||||
|
||||
|
||||
@@ -56,6 +56,8 @@ from plane.db.models import (
|
||||
from plane.settings.storage import S3Storage
|
||||
from plane.bgtasks.storage_metadata_task import get_asset_object_metadata
|
||||
from .base import BaseAPIView
|
||||
from plane.utils.host import base_host
|
||||
from plane.bgtasks.webhook_task import model_activity
|
||||
|
||||
|
||||
class WorkspaceIssueAPIEndpoint(BaseAPIView):
|
||||
@@ -321,6 +323,17 @@ class IssueAPIEndpoint(BaseAPIView):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
)
|
||||
|
||||
# Send the model activity
|
||||
model_activity.delay(
|
||||
model_name="issue",
|
||||
model_id=str(serializer.data["id"]),
|
||||
requested_data=request.data,
|
||||
current_instance=None,
|
||||
actor_id=request.user.id,
|
||||
slug=slug,
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
@@ -1048,7 +1061,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
|
||||
# Get the storage metadata
|
||||
@@ -1108,7 +1121,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
|
||||
current_instance=json.dumps(serializer.data, cls=DjangoJSONEncoder),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
|
||||
# Update the attachment
|
||||
|
||||
@@ -33,6 +33,7 @@ from plane.db.models import (
|
||||
|
||||
from .base import BaseAPIView
|
||||
from plane.bgtasks.webhook_task import model_activity
|
||||
from plane.utils.host import base_host
|
||||
|
||||
|
||||
class ModuleAPIEndpoint(BaseAPIView):
|
||||
@@ -174,7 +175,7 @@ class ModuleAPIEndpoint(BaseAPIView):
|
||||
current_instance=None,
|
||||
actor_id=request.user.id,
|
||||
slug=slug,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
module = Module.objects.get(pk=serializer.data["id"])
|
||||
serializer = ModuleSerializer(module)
|
||||
@@ -226,7 +227,7 @@ class ModuleAPIEndpoint(BaseAPIView):
|
||||
current_instance=current_instance,
|
||||
actor_id=request.user.id,
|
||||
slug=slug,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
@@ -280,6 +281,7 @@ class ModuleAPIEndpoint(BaseAPIView):
|
||||
project_id=str(project_id),
|
||||
current_instance=json.dumps({"module_name": str(module.name)}),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
module.delete()
|
||||
# Delete the module issues
|
||||
@@ -449,6 +451,7 @@ class ModuleIssueAPIEndpoint(BaseAPIView):
|
||||
}
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
|
||||
return Response(
|
||||
|
||||
@@ -30,6 +30,7 @@ from plane.db.models import (
|
||||
)
|
||||
from plane.bgtasks.webhook_task import model_activity, webhook_activity
|
||||
from .base import BaseAPIView
|
||||
from plane.utils.host import base_host
|
||||
|
||||
|
||||
class ProjectAPIEndpoint(BaseAPIView):
|
||||
@@ -228,7 +229,7 @@ class ProjectAPIEndpoint(BaseAPIView):
|
||||
current_instance=None,
|
||||
actor_id=request.user.id,
|
||||
slug=slug,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
|
||||
serializer = ProjectSerializer(project)
|
||||
@@ -238,7 +239,7 @@ class ProjectAPIEndpoint(BaseAPIView):
|
||||
if "already exists" in str(e):
|
||||
return Response(
|
||||
{"name": "The project name is already taken"},
|
||||
status=status.HTTP_410_GONE,
|
||||
status=status.HTTP_409_CONFLICT,
|
||||
)
|
||||
except Workspace.DoesNotExist:
|
||||
return Response(
|
||||
@@ -247,7 +248,7 @@ class ProjectAPIEndpoint(BaseAPIView):
|
||||
except ValidationError:
|
||||
return Response(
|
||||
{"identifier": "The project identifier is already taken"},
|
||||
status=status.HTTP_410_GONE,
|
||||
status=status.HTTP_409_CONFLICT,
|
||||
)
|
||||
|
||||
def patch(self, request, slug, pk):
|
||||
@@ -258,9 +259,7 @@ class ProjectAPIEndpoint(BaseAPIView):
|
||||
ProjectSerializer(project).data, cls=DjangoJSONEncoder
|
||||
)
|
||||
|
||||
intake_view = request.data.get(
|
||||
"inbox_view", request.data.get("intake_view", project.intake_view)
|
||||
)
|
||||
intake_view = request.data.get("intake_view", project.intake_view)
|
||||
|
||||
if project.archived_at:
|
||||
return Response(
|
||||
@@ -297,7 +296,7 @@ class ProjectAPIEndpoint(BaseAPIView):
|
||||
current_instance=current_instance,
|
||||
actor_id=request.user.id,
|
||||
slug=slug,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
|
||||
serializer = ProjectSerializer(project)
|
||||
@@ -307,7 +306,7 @@ class ProjectAPIEndpoint(BaseAPIView):
|
||||
if "already exists" in str(e):
|
||||
return Response(
|
||||
{"name": "The project name is already taken"},
|
||||
status=status.HTTP_410_GONE,
|
||||
status=status.HTTP_409_CONFLICT,
|
||||
)
|
||||
except (Project.DoesNotExist, Workspace.DoesNotExist):
|
||||
return Response(
|
||||
@@ -316,7 +315,7 @@ class ProjectAPIEndpoint(BaseAPIView):
|
||||
except ValidationError:
|
||||
return Response(
|
||||
{"identifier": "The project identifier is already taken"},
|
||||
status=status.HTTP_410_GONE,
|
||||
status=status.HTTP_409_CONFLICT,
|
||||
)
|
||||
|
||||
def delete(self, request, slug, pk):
|
||||
@@ -334,7 +333,7 @@ class ProjectAPIEndpoint(BaseAPIView):
|
||||
new_value=None,
|
||||
actor_id=request.user.id,
|
||||
slug=slug,
|
||||
current_site=request.META.get("HTTP_ORIGIN"),
|
||||
current_site=base_host(request=request, is_app=True),
|
||||
event_id=project.id,
|
||||
old_identifier=None,
|
||||
new_identifier=None,
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
from .base import BaseSerializer
|
||||
from plane.db.models import APIToken, APIActivityLog
|
||||
from rest_framework import serializers
|
||||
from django.utils import timezone
|
||||
|
||||
|
||||
class APITokenSerializer(BaseSerializer):
|
||||
@@ -17,10 +19,17 @@ class APITokenSerializer(BaseSerializer):
|
||||
|
||||
|
||||
class APITokenReadSerializer(BaseSerializer):
|
||||
is_active = serializers.SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = APIToken
|
||||
exclude = ("token",)
|
||||
|
||||
def get_is_active(self, obj: APIToken) -> bool:
|
||||
if obj.expired_at is None:
|
||||
return True
|
||||
return timezone.now() < obj.expired_at
|
||||
|
||||
|
||||
class APIActivityLogSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
|
||||
@@ -352,8 +352,19 @@ class IssueRelationSerializer(BaseSerializer):
|
||||
"state_id",
|
||||
"priority",
|
||||
"assignee_ids",
|
||||
"created_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"updated_by",
|
||||
]
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"project",
|
||||
"created_by",
|
||||
"created_at",
|
||||
"updated_by",
|
||||
"updated_at",
|
||||
]
|
||||
read_only_fields = ["workspace", "project"]
|
||||
|
||||
|
||||
class RelatedIssueSerializer(BaseSerializer):
|
||||
@@ -383,8 +394,19 @@ class RelatedIssueSerializer(BaseSerializer):
|
||||
"state_id",
|
||||
"priority",
|
||||
"assignee_ids",
|
||||
"created_by",
|
||||
"created_at",
|
||||
"updated_by",
|
||||
"updated_at",
|
||||
]
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"project",
|
||||
"created_by",
|
||||
"created_at",
|
||||
"updated_by",
|
||||
"updated_at",
|
||||
]
|
||||
read_only_fields = ["workspace", "project"]
|
||||
|
||||
|
||||
class IssueAssigneeSerializer(BaseSerializer):
|
||||
|
||||
@@ -1,7 +1,11 @@
|
||||
from django.urls import path
|
||||
|
||||
|
||||
from plane.app.views import IntakeViewSet, IntakeIssueViewSet
|
||||
from plane.app.views import (
|
||||
IntakeViewSet,
|
||||
IntakeIssueViewSet,
|
||||
IntakeWorkItemDescriptionVersionEndpoint,
|
||||
)
|
||||
|
||||
|
||||
urlpatterns = [
|
||||
@@ -53,4 +57,14 @@ urlpatterns = [
|
||||
),
|
||||
name="inbox-issue",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/intake-work-items/<uuid:work_item_id>/description-versions/",
|
||||
IntakeWorkItemDescriptionVersionEndpoint.as_view(),
|
||||
name="intake-work-item-versions",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/intake-work-items/<uuid:work_item_id>/description-versions/<uuid:pk>/",
|
||||
IntakeWorkItemDescriptionVersionEndpoint.as_view(),
|
||||
name="intake-work-item-versions",
|
||||
),
|
||||
]
|
||||
|
||||
@@ -25,7 +25,7 @@ from plane.app.views import (
|
||||
IssueAttachmentV2Endpoint,
|
||||
IssueBulkUpdateDateEndpoint,
|
||||
IssueVersionEndpoint,
|
||||
IssueDescriptionVersionEndpoint,
|
||||
WorkItemDescriptionVersionEndpoint,
|
||||
IssueMetaEndpoint,
|
||||
IssueDetailIdentifierEndpoint,
|
||||
)
|
||||
@@ -263,22 +263,22 @@ urlpatterns = [
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/versions/",
|
||||
IssueVersionEndpoint.as_view(),
|
||||
name="page-versions",
|
||||
name="issue-versions",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/versions/<uuid:pk>/",
|
||||
IssueVersionEndpoint.as_view(),
|
||||
name="page-versions",
|
||||
name="issue-versions",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/description-versions/",
|
||||
IssueDescriptionVersionEndpoint.as_view(),
|
||||
name="page-versions",
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/work-items/<uuid:work_item_id>/description-versions/",
|
||||
WorkItemDescriptionVersionEndpoint.as_view(),
|
||||
name="work-item-versions",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/description-versions/<uuid:pk>/",
|
||||
IssueDescriptionVersionEndpoint.as_view(),
|
||||
name="page-versions",
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/work-items/<uuid:work_item_id>/description-versions/<uuid:pk>/",
|
||||
WorkItemDescriptionVersionEndpoint.as_view(),
|
||||
name="work-item-versions",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/meta/",
|
||||
|
||||
@@ -144,7 +144,7 @@ from .issue.sub_issue import SubIssuesEndpoint
|
||||
|
||||
from .issue.subscriber import IssueSubscriberViewSet
|
||||
|
||||
from .issue.version import IssueVersionEndpoint, IssueDescriptionVersionEndpoint
|
||||
from .issue.version import IssueVersionEndpoint, WorkItemDescriptionVersionEndpoint
|
||||
|
||||
from .module.base import (
|
||||
ModuleViewSet,
|
||||
@@ -184,7 +184,11 @@ from .estimate.base import (
|
||||
EstimatePointEndpoint,
|
||||
)
|
||||
|
||||
from .intake.base import IntakeViewSet, IntakeIssueViewSet
|
||||
from .intake.base import (
|
||||
IntakeViewSet,
|
||||
IntakeIssueViewSet,
|
||||
IntakeWorkItemDescriptionVersionEndpoint,
|
||||
)
|
||||
|
||||
from .analytic.base import (
|
||||
AnalyticsEndpoint,
|
||||
|
||||
@@ -137,7 +137,7 @@ class UserAssetsV2Endpoint(BaseAPIView):
|
||||
if type not in allowed_types:
|
||||
return Response(
|
||||
{
|
||||
"error": "Invalid file type. Only JPEG and PNG files are allowed.",
|
||||
"error": "Invalid file type. Only JPEG, PNG, WebP, JPG and GIF files are allowed.",
|
||||
"status": False,
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
@@ -351,7 +351,7 @@ class WorkspaceFileAssetEndpoint(BaseAPIView):
|
||||
if type not in allowed_types:
|
||||
return Response(
|
||||
{
|
||||
"error": "Invalid file type. Only JPEG and PNG files are allowed.",
|
||||
"error": "Invalid file type. Only JPEG, PNG, WebP, JPG and GIF files are allowed.",
|
||||
"status": False,
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
@@ -552,7 +552,7 @@ class ProjectAssetEndpoint(BaseAPIView):
|
||||
if type not in allowed_types:
|
||||
return Response(
|
||||
{
|
||||
"error": "Invalid file type. Only JPEG and PNG files are allowed.",
|
||||
"error": "Invalid file type. Only JPEG, PNG, WebP, JPG and GIF files are allowed.",
|
||||
"status": False,
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
@@ -683,7 +683,7 @@ class ProjectBulkAssetEndpoint(BaseAPIView):
|
||||
# For some cases, the bulk api is called after the issue is deleted creating
|
||||
# an integrity error
|
||||
try:
|
||||
assets.update(issue_id=entity_id)
|
||||
assets.update(issue_id=entity_id, project_id=project_id)
|
||||
except IntegrityError:
|
||||
pass
|
||||
|
||||
|
||||
@@ -51,8 +51,7 @@ from plane.db.models import (
|
||||
)
|
||||
from plane.utils.analytics_plot import burndown_plot
|
||||
from plane.bgtasks.recent_visited_task import recent_visited_task
|
||||
|
||||
# Module imports
|
||||
from plane.utils.host import base_host
|
||||
from .. import BaseAPIView, BaseViewSet
|
||||
from plane.bgtasks.webhook_task import model_activity
|
||||
from plane.utils.timezone_converter import convert_to_utc, user_timezone_converter
|
||||
@@ -335,7 +334,7 @@ class CycleViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
actor_id=request.user.id,
|
||||
slug=slug,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response(cycle, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
@@ -428,7 +427,7 @@ class CycleViewSet(BaseViewSet):
|
||||
current_instance=current_instance,
|
||||
actor_id=request.user.id,
|
||||
slug=slug,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
|
||||
return Response(cycle, status=status.HTTP_200_OK)
|
||||
@@ -541,7 +540,7 @@ class CycleViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
# TODO: Soft delete the cycle break the onetoone relationship with cycle issue
|
||||
cycle.delete()
|
||||
@@ -1080,7 +1079,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
|
||||
return Response({"message": "Success"}, status=status.HTTP_200_OK)
|
||||
|
||||
@@ -27,7 +27,7 @@ from plane.utils.issue_filters import issue_filters
|
||||
from plane.utils.order_queryset import order_issue_queryset
|
||||
from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPaginator
|
||||
from plane.app.permissions import allow_permission, ROLE
|
||||
|
||||
from plane.utils.host import base_host
|
||||
|
||||
class CycleIssueViewSet(BaseViewSet):
|
||||
serializer_class = CycleIssueSerializer
|
||||
@@ -291,7 +291,7 @@ class CycleIssueViewSet(BaseViewSet):
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response({"message": "success"}, status=status.HTTP_201_CREATED)
|
||||
|
||||
@@ -317,7 +317,7 @@ class CycleIssueViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
cycle_issue.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@@ -27,16 +27,24 @@ from plane.db.models import (
|
||||
Project,
|
||||
ProjectMember,
|
||||
CycleIssue,
|
||||
IssueDescriptionVersion,
|
||||
)
|
||||
from plane.app.serializers import (
|
||||
IssueCreateSerializer,
|
||||
IssueSerializer,
|
||||
IssueDetailSerializer,
|
||||
IntakeSerializer,
|
||||
IntakeIssueSerializer,
|
||||
IntakeIssueDetailSerializer,
|
||||
IssueDescriptionVersionDetailSerializer,
|
||||
)
|
||||
from plane.utils.issue_filters import issue_filters
|
||||
from plane.bgtasks.issue_activities_task import issue_activity
|
||||
from plane.bgtasks.issue_description_version_task import issue_description_version_task
|
||||
from plane.app.views.base import BaseAPIView
|
||||
from plane.utils.timezone_converter import user_timezone_converter
|
||||
from plane.utils.global_paginator import paginate
|
||||
from plane.utils.host import base_host
|
||||
from plane.db.models.intake import SourceType
|
||||
|
||||
|
||||
class IntakeViewSet(BaseViewSet):
|
||||
@@ -87,7 +95,7 @@ class IntakeIssueViewSet(BaseViewSet):
|
||||
serializer_class = IntakeIssueSerializer
|
||||
model = IntakeIssue
|
||||
|
||||
filterset_fields = ["statulls"]
|
||||
filterset_fields = ["status"]
|
||||
|
||||
def get_queryset(self):
|
||||
return (
|
||||
@@ -218,7 +226,7 @@ class IntakeIssueViewSet(BaseViewSet):
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
member=request.user,
|
||||
role=5,
|
||||
role=ROLE.GUEST.value,
|
||||
is_active=True,
|
||||
).exists()
|
||||
and not project.guest_view_all_features
|
||||
@@ -271,7 +279,7 @@ class IntakeIssueViewSet(BaseViewSet):
|
||||
intake_id=intake_id.id,
|
||||
project_id=project_id,
|
||||
issue_id=serializer.data["id"],
|
||||
source=request.data.get("source", "IN-APP"),
|
||||
source=SourceType.IN_APP,
|
||||
)
|
||||
# Create an Issue Activity
|
||||
issue_activity.delay(
|
||||
@@ -283,9 +291,16 @@ class IntakeIssueViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
intake=str(intake_issue.id),
|
||||
)
|
||||
# updated issue description version
|
||||
issue_description_version_task.delay(
|
||||
updated_issue=json.dumps(request.data, cls=DjangoJSONEncoder),
|
||||
issue_id=str(serializer.data["id"]),
|
||||
user_id=request.user.id,
|
||||
is_creating=True,
|
||||
)
|
||||
intake_issue = (
|
||||
IntakeIssue.objects.select_related("issue")
|
||||
.prefetch_related("issue__labels", "issue__assignees")
|
||||
@@ -385,13 +400,15 @@ class IntakeIssueViewSet(BaseViewSet):
|
||||
),
|
||||
"description": issue_data.get("description", issue.description),
|
||||
}
|
||||
current_instance = json.dumps(
|
||||
IssueDetailSerializer(issue).data, cls=DjangoJSONEncoder
|
||||
)
|
||||
|
||||
issue_serializer = IssueCreateSerializer(
|
||||
issue, data=issue_data, partial=True, context={"project_id": project_id}
|
||||
)
|
||||
|
||||
if issue_serializer.is_valid():
|
||||
current_instance = issue
|
||||
# Log all the updates
|
||||
requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
|
||||
if issue is not None:
|
||||
@@ -401,15 +418,18 @@ class IntakeIssueViewSet(BaseViewSet):
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=str(issue.id),
|
||||
project_id=str(project_id),
|
||||
current_instance=json.dumps(
|
||||
IssueSerializer(current_instance).data,
|
||||
cls=DjangoJSONEncoder,
|
||||
),
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
intake=str(intake_issue.id),
|
||||
)
|
||||
# updated issue description version
|
||||
issue_description_version_task.delay(
|
||||
updated_issue=current_instance,
|
||||
issue_id=str(pk),
|
||||
user_id=request.user.id,
|
||||
)
|
||||
issue_serializer.save()
|
||||
else:
|
||||
return Response(
|
||||
@@ -467,7 +487,7 @@ class IntakeIssueViewSet(BaseViewSet):
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=False,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
intake=(intake_issue.id),
|
||||
)
|
||||
|
||||
@@ -549,7 +569,7 @@ class IntakeIssueViewSet(BaseViewSet):
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
member=request.user,
|
||||
role=5,
|
||||
role=ROLE.GUEST.value,
|
||||
is_active=True,
|
||||
).exists()
|
||||
and not project.guest_view_all_features
|
||||
@@ -557,7 +577,7 @@ class IntakeIssueViewSet(BaseViewSet):
|
||||
):
|
||||
return Response(
|
||||
{"error": "You are not allowed to view this issue"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
status=status.HTTP_403_FORBIDDEN,
|
||||
)
|
||||
issue = IntakeIssueDetailSerializer(intake_issue).data
|
||||
return Response(issue, status=status.HTTP_200_OK)
|
||||
@@ -584,3 +604,80 @@ class IntakeIssueViewSet(BaseViewSet):
|
||||
|
||||
intake_issue.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
class IntakeWorkItemDescriptionVersionEndpoint(BaseAPIView):
|
||||
def process_paginated_result(self, fields, results, timezone):
|
||||
paginated_data = results.values(*fields)
|
||||
|
||||
datetime_fields = ["created_at", "updated_at"]
|
||||
paginated_data = user_timezone_converter(
|
||||
paginated_data, datetime_fields, timezone
|
||||
)
|
||||
|
||||
return paginated_data
|
||||
|
||||
@allow_permission(allowed_roles=[ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
|
||||
def get(self, request, slug, project_id, work_item_id, pk=None):
|
||||
project = Project.objects.get(pk=project_id)
|
||||
issue = Issue.objects.get(
|
||||
workspace__slug=slug, project_id=project_id, pk=work_item_id
|
||||
)
|
||||
|
||||
if (
|
||||
ProjectMember.objects.filter(
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
member=request.user,
|
||||
role=ROLE.GUEST.value,
|
||||
is_active=True,
|
||||
).exists()
|
||||
and not project.guest_view_all_features
|
||||
and not issue.created_by == request.user
|
||||
):
|
||||
return Response(
|
||||
{"error": "You are not allowed to view this issue"},
|
||||
status=status.HTTP_403_FORBIDDEN,
|
||||
)
|
||||
|
||||
if pk:
|
||||
issue_description_version = IssueDescriptionVersion.objects.get(
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
issue_id=work_item_id,
|
||||
pk=pk,
|
||||
)
|
||||
|
||||
serializer = IssueDescriptionVersionDetailSerializer(
|
||||
issue_description_version
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
cursor = request.GET.get("cursor", None)
|
||||
|
||||
required_fields = [
|
||||
"id",
|
||||
"workspace",
|
||||
"project",
|
||||
"issue",
|
||||
"last_saved_at",
|
||||
"owned_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
]
|
||||
|
||||
issue_description_versions_queryset = IssueDescriptionVersion.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id, issue_id=work_item_id
|
||||
)
|
||||
|
||||
paginated_data = paginate(
|
||||
base_queryset=issue_description_versions_queryset,
|
||||
queryset=issue_description_versions_queryset,
|
||||
cursor=cursor,
|
||||
on_result=lambda results: self.process_paginated_result(
|
||||
required_fields, results, request.user.user_timezone
|
||||
),
|
||||
)
|
||||
return Response(paginated_data, status=status.HTTP_200_OK)
|
||||
|
||||
@@ -37,7 +37,7 @@ from plane.utils.order_queryset import order_issue_queryset
|
||||
from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPaginator
|
||||
from plane.app.permissions import allow_permission, ROLE
|
||||
from plane.utils.error_codes import ERROR_CODES
|
||||
|
||||
from plane.utils.host import base_host
|
||||
# Module imports
|
||||
from .. import BaseViewSet, BaseAPIView
|
||||
|
||||
@@ -259,7 +259,7 @@ class IssueArchiveViewSet(BaseViewSet):
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
issue.archived_at = timezone.now().date()
|
||||
issue.save()
|
||||
@@ -287,7 +287,7 @@ class IssueArchiveViewSet(BaseViewSet):
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
issue.archived_at = None
|
||||
issue.save()
|
||||
@@ -333,7 +333,7 @@ class BulkArchiveIssuesEndpoint(BaseAPIView):
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
issue.archived_at = timezone.now().date()
|
||||
bulk_archive_issues.append(issue)
|
||||
|
||||
@@ -21,7 +21,7 @@ from plane.bgtasks.issue_activities_task import issue_activity
|
||||
from plane.app.permissions import allow_permission, ROLE
|
||||
from plane.settings.storage import S3Storage
|
||||
from plane.bgtasks.storage_metadata_task import get_asset_object_metadata
|
||||
|
||||
from plane.utils.host import base_host
|
||||
|
||||
class IssueAttachmentEndpoint(BaseAPIView):
|
||||
serializer_class = IssueAttachmentSerializer
|
||||
@@ -48,7 +48,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
|
||||
current_instance=json.dumps(serializer.data, cls=DjangoJSONEncoder),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
@@ -67,7 +67,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
@@ -155,7 +155,7 @@ class IssueAttachmentV2Endpoint(BaseAPIView):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
@@ -213,7 +213,7 @@ class IssueAttachmentV2Endpoint(BaseAPIView):
|
||||
current_instance=json.dumps(serializer.data, cls=DjangoJSONEncoder),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
|
||||
# Update the attachment
|
||||
|
||||
@@ -61,6 +61,7 @@ from plane.bgtasks.recent_visited_task import recent_visited_task
|
||||
from plane.utils.global_paginator import paginate
|
||||
from plane.bgtasks.webhook_task import model_activity
|
||||
from plane.bgtasks.issue_description_version_task import issue_description_version_task
|
||||
from plane.utils.host import base_host
|
||||
|
||||
|
||||
class IssueListEndpoint(BaseAPIView):
|
||||
@@ -379,7 +380,7 @@ class IssueViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
issue = (
|
||||
issue_queryset_grouper(
|
||||
@@ -429,7 +430,7 @@ class IssueViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
actor_id=request.user.id,
|
||||
slug=slug,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
# updated issue description version
|
||||
issue_description_version_task.delay(
|
||||
@@ -565,7 +566,7 @@ class IssueViewSet(BaseViewSet):
|
||||
):
|
||||
return Response(
|
||||
{"error": "You are not allowed to view this issue"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
status=status.HTTP_403_FORBIDDEN,
|
||||
)
|
||||
|
||||
recent_visited_task.delay(
|
||||
@@ -632,7 +633,7 @@ class IssueViewSet(BaseViewSet):
|
||||
)
|
||||
|
||||
current_instance = json.dumps(
|
||||
IssueSerializer(issue).data, cls=DjangoJSONEncoder
|
||||
IssueDetailSerializer(issue).data, cls=DjangoJSONEncoder
|
||||
)
|
||||
|
||||
requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
|
||||
@@ -650,7 +651,7 @@ class IssueViewSet(BaseViewSet):
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
model_activity.delay(
|
||||
model_name="issue",
|
||||
@@ -659,7 +660,7 @@ class IssueViewSet(BaseViewSet):
|
||||
current_instance=current_instance,
|
||||
actor_id=request.user.id,
|
||||
slug=slug,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
# updated issue description version
|
||||
issue_description_version_task.delay(
|
||||
@@ -691,7 +692,8 @@ class IssueViewSet(BaseViewSet):
|
||||
current_instance={},
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
subscriber=False,
|
||||
)
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@@ -1033,9 +1035,17 @@ class IssueBulkUpdateDateEndpoint(BaseAPIView):
|
||||
"""
|
||||
Validate that start date is before target date.
|
||||
"""
|
||||
from datetime import datetime
|
||||
|
||||
start = new_start or current_start
|
||||
target = new_target or current_target
|
||||
|
||||
# Convert string dates to datetime objects if they're strings
|
||||
if isinstance(start, str):
|
||||
start = datetime.strptime(start, "%Y-%m-%d").date()
|
||||
if isinstance(target, str):
|
||||
target = datetime.strptime(target, "%Y-%m-%d").date()
|
||||
|
||||
if start and target and start > target:
|
||||
return False
|
||||
return True
|
||||
@@ -1277,7 +1287,7 @@ class IssueDetailIdentifierEndpoint(BaseAPIView):
|
||||
):
|
||||
return Response(
|
||||
{"error": "You are not allowed to view this issue"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
status=status.HTTP_403_FORBIDDEN,
|
||||
)
|
||||
|
||||
recent_visited_task.delay(
|
||||
|
||||
@@ -17,7 +17,7 @@ from plane.app.serializers import IssueCommentSerializer, CommentReactionSeriali
|
||||
from plane.app.permissions import allow_permission, ROLE
|
||||
from plane.db.models import IssueComment, ProjectMember, CommentReaction, Project, Issue
|
||||
from plane.bgtasks.issue_activities_task import issue_activity
|
||||
|
||||
from plane.utils.host import base_host
|
||||
|
||||
class IssueCommentViewSet(BaseViewSet):
|
||||
serializer_class = IssueCommentSerializer
|
||||
@@ -87,7 +87,7 @@ class IssueCommentViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
@@ -121,7 +121,7 @@ class IssueCommentViewSet(BaseViewSet):
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
@@ -144,7 +144,7 @@ class IssueCommentViewSet(BaseViewSet):
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@@ -188,7 +188,7 @@ class CommentReactionViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
@@ -222,7 +222,7 @@ class CommentReactionViewSet(BaseViewSet):
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
comment_reaction.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@@ -15,7 +15,7 @@ from plane.app.serializers import IssueLinkSerializer
|
||||
from plane.app.permissions import ProjectEntityPermission
|
||||
from plane.db.models import IssueLink
|
||||
from plane.bgtasks.issue_activities_task import issue_activity
|
||||
|
||||
from plane.utils.host import base_host
|
||||
|
||||
class IssueLinkViewSet(BaseViewSet):
|
||||
permission_classes = [ProjectEntityPermission]
|
||||
@@ -52,7 +52,7 @@ class IssueLinkViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
@@ -77,7 +77,7 @@ class IssueLinkViewSet(BaseViewSet):
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
@@ -98,7 +98,7 @@ class IssueLinkViewSet(BaseViewSet):
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
issue_link.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@@ -15,7 +15,7 @@ from plane.app.serializers import IssueReactionSerializer
|
||||
from plane.app.permissions import allow_permission, ROLE
|
||||
from plane.db.models import IssueReaction
|
||||
from plane.bgtasks.issue_activities_task import issue_activity
|
||||
|
||||
from plane.utils.host import base_host
|
||||
|
||||
class IssueReactionViewSet(BaseViewSet):
|
||||
serializer_class = IssueReactionSerializer
|
||||
@@ -53,7 +53,7 @@ class IssueReactionViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
@@ -78,7 +78,7 @@ class IssueReactionViewSet(BaseViewSet):
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
issue_reaction.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@@ -27,7 +27,7 @@ from plane.db.models import (
|
||||
)
|
||||
from plane.bgtasks.issue_activities_task import issue_activity
|
||||
from plane.utils.issue_relation_mapper import get_actual_relation
|
||||
|
||||
from plane.utils.host import base_host
|
||||
|
||||
class IssueRelationViewSet(BaseViewSet):
|
||||
serializer_class = IssueRelationSerializer
|
||||
@@ -253,7 +253,7 @@ class IssueRelationViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
|
||||
if relation_type in ["blocking", "start_after", "finish_after"]:
|
||||
@@ -290,6 +290,6 @@ class IssueRelationViewSet(BaseViewSet):
|
||||
current_instance=current_instance,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@@ -22,7 +22,7 @@ from plane.db.models import Issue, IssueLink, FileAsset, CycleIssue
|
||||
from plane.bgtasks.issue_activities_task import issue_activity
|
||||
from plane.utils.timezone_converter import user_timezone_converter
|
||||
from collections import defaultdict
|
||||
|
||||
from plane.utils.host import base_host
|
||||
|
||||
class SubIssuesEndpoint(BaseAPIView):
|
||||
permission_classes = [ProjectEntityPermission]
|
||||
@@ -176,7 +176,7 @@ class SubIssuesEndpoint(BaseAPIView):
|
||||
current_instance=json.dumps({"parent": str(sub_issue_id)}),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
for sub_issue_id in sub_issue_ids
|
||||
]
|
||||
|
||||
@@ -3,7 +3,13 @@ from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
|
||||
# Module imports
|
||||
from plane.db.models import IssueVersion, IssueDescriptionVersion
|
||||
from plane.db.models import (
|
||||
IssueVersion,
|
||||
IssueDescriptionVersion,
|
||||
Project,
|
||||
ProjectMember,
|
||||
Issue,
|
||||
)
|
||||
from ..base import BaseAPIView
|
||||
from plane.app.serializers import (
|
||||
IssueVersionDetailSerializer,
|
||||
@@ -66,7 +72,7 @@ class IssueVersionEndpoint(BaseAPIView):
|
||||
return Response(paginated_data, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
class IssueDescriptionVersionEndpoint(BaseAPIView):
|
||||
class WorkItemDescriptionVersionEndpoint(BaseAPIView):
|
||||
def process_paginated_result(self, fields, results, timezone):
|
||||
paginated_data = results.values(*fields)
|
||||
|
||||
@@ -78,10 +84,34 @@ class IssueDescriptionVersionEndpoint(BaseAPIView):
|
||||
return paginated_data
|
||||
|
||||
@allow_permission(allowed_roles=[ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
|
||||
def get(self, request, slug, project_id, issue_id, pk=None):
|
||||
def get(self, request, slug, project_id, work_item_id, pk=None):
|
||||
project = Project.objects.get(pk=project_id)
|
||||
issue = Issue.objects.get(
|
||||
workspace__slug=slug, project_id=project_id, pk=work_item_id
|
||||
)
|
||||
|
||||
if (
|
||||
ProjectMember.objects.filter(
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
member=request.user,
|
||||
role=ROLE.GUEST.value,
|
||||
is_active=True,
|
||||
).exists()
|
||||
and not project.guest_view_all_features
|
||||
and not issue.created_by == request.user
|
||||
):
|
||||
return Response(
|
||||
{"error": "You are not allowed to view this issue"},
|
||||
status=status.HTTP_403_FORBIDDEN,
|
||||
)
|
||||
|
||||
if pk:
|
||||
issue_description_version = IssueDescriptionVersion.objects.get(
|
||||
workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
issue_id=work_item_id,
|
||||
pk=pk,
|
||||
)
|
||||
|
||||
serializer = IssueDescriptionVersionDetailSerializer(
|
||||
@@ -105,8 +135,8 @@ class IssueDescriptionVersionEndpoint(BaseAPIView):
|
||||
]
|
||||
|
||||
issue_description_versions_queryset = IssueDescriptionVersion.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id, issue_id=issue_id
|
||||
)
|
||||
workspace__slug=slug, project_id=project_id, issue_id=work_item_id
|
||||
).order_by("-created_at")
|
||||
paginated_data = paginate(
|
||||
base_queryset=issue_description_versions_queryset,
|
||||
queryset=issue_description_versions_queryset,
|
||||
|
||||
@@ -61,7 +61,7 @@ from plane.utils.timezone_converter import user_timezone_converter
|
||||
from plane.bgtasks.webhook_task import model_activity
|
||||
from .. import BaseAPIView, BaseViewSet
|
||||
from plane.bgtasks.recent_visited_task import recent_visited_task
|
||||
|
||||
from plane.utils.host import base_host
|
||||
|
||||
class ModuleViewSet(BaseViewSet):
|
||||
model = Module
|
||||
@@ -376,7 +376,7 @@ class ModuleViewSet(BaseViewSet):
|
||||
current_instance=None,
|
||||
actor_id=request.user.id,
|
||||
slug=slug,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
datetime_fields = ["created_at", "updated_at"]
|
||||
module = user_timezone_converter(
|
||||
@@ -768,7 +768,7 @@ class ModuleViewSet(BaseViewSet):
|
||||
current_instance=current_instance,
|
||||
actor_id=request.user.id,
|
||||
slug=slug,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
|
||||
datetime_fields = ["created_at", "updated_at"]
|
||||
@@ -795,7 +795,7 @@ class ModuleViewSet(BaseViewSet):
|
||||
current_instance=json.dumps({"module_name": str(module.name)}),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
for issue in module_issues
|
||||
]
|
||||
|
||||
@@ -34,7 +34,7 @@ from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPagina

# Module imports
from .. import BaseViewSet

from plane.utils.host import base_host

class ModuleIssueViewSet(BaseViewSet):
serializer_class = ModuleIssueSerializer
@@ -221,7 +221,7 @@ class ModuleIssueViewSet(BaseViewSet):
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
-origin=request.META.get("HTTP_ORIGIN"),
+origin=base_host(request=request, is_app=True),
)
for issue in issues
]
@@ -261,7 +261,7 @@ class ModuleIssueViewSet(BaseViewSet):
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
-origin=request.META.get("HTTP_ORIGIN"),
+origin=base_host(request=request, is_app=True),
)
for module in modules
]
@@ -284,7 +284,7 @@ class ModuleIssueViewSet(BaseViewSet):
),
epoch=int(timezone.now().timestamp()),
notification=True,
-origin=request.META.get("HTTP_ORIGIN"),
+origin=base_host(request=request, is_app=True),
)
module_issue.delete()

@@ -309,7 +309,7 @@ class ModuleIssueViewSet(BaseViewSet):
),
epoch=int(timezone.now().timestamp()),
notification=True,
-origin=request.META.get("HTTP_ORIGIN"),
+origin=base_host(request=request, is_app=True),
)
module_issue.delete()
return Response(status=status.HTTP_204_NO_CONTENT)

@@ -39,6 +39,7 @@ from plane.utils.cache import cache_response
from plane.bgtasks.webhook_task import model_activity, webhook_activity
from plane.bgtasks.recent_visited_task import recent_visited_task
from plane.utils.exception_logger import log_exception
+from plane.utils.host import base_host


class ProjectViewSet(BaseViewSet):
@@ -331,7 +332,7 @@ class ProjectViewSet(BaseViewSet):
current_instance=None,
actor_id=request.user.id,
slug=slug,
-origin=request.META.get("HTTP_ORIGIN"),
+origin=base_host(request=request, is_app=True),
)

serializer = ProjectListSerializer(project)
@@ -341,7 +342,7 @@ class ProjectViewSet(BaseViewSet):
if "already exists" in str(e):
return Response(
{"name": "The project name is already taken"},
-status=status.HTTP_410_GONE,
+status=status.HTTP_409_CONFLICT,
)
except Workspace.DoesNotExist:
return Response(
@@ -350,7 +351,7 @@ class ProjectViewSet(BaseViewSet):
except serializers.ValidationError:
return Response(
{"identifier": "The project identifier is already taken"},
-status=status.HTTP_410_GONE,
+status=status.HTTP_409_CONFLICT,
)

def partial_update(self, request, slug, pk=None):
@@ -409,7 +410,7 @@ class ProjectViewSet(BaseViewSet):
current_instance=current_instance,
actor_id=request.user.id,
slug=slug,
-origin=request.META.get("HTTP_ORIGIN"),
+origin=base_host(request=request, is_app=True),
)
serializer = ProjectListSerializer(project)
return Response(serializer.data, status=status.HTTP_200_OK)
@@ -419,7 +420,7 @@ class ProjectViewSet(BaseViewSet):
if "already exists" in str(e):
return Response(
{"name": "The project name is already taken"},
-status=status.HTTP_410_GONE,
+status=status.HTTP_409_CONFLICT,
)
except (Project.DoesNotExist, Workspace.DoesNotExist):
return Response(
@@ -428,7 +429,7 @@ class ProjectViewSet(BaseViewSet):
except serializers.ValidationError:
return Response(
{"identifier": "The project identifier is already taken"},
-status=status.HTTP_410_GONE,
+status=status.HTTP_409_CONFLICT,
)

def destroy(self, request, slug, pk):
@@ -454,7 +455,7 @@ class ProjectViewSet(BaseViewSet):
new_value=None,
actor_id=request.user.id,
slug=slug,
-current_site=request.META.get("HTTP_ORIGIN"),
+current_site=base_host(request=request, is_app=True),
event_id=project.id,
old_identifier=None,
new_identifier=None,

@@ -27,7 +27,7 @@ from plane.db.models import (
IssueUserProperty,
)
from plane.db.models.project import ProjectNetwork

from plane.utils.host import base_host

class ProjectInvitationsViewset(BaseViewSet):
serializer_class = ProjectMemberInviteSerializer
@@ -99,7 +99,7 @@ class ProjectInvitationsViewset(BaseViewSet):
project_invitations = ProjectMemberInvite.objects.bulk_create(
project_invitations, batch_size=10, ignore_conflicts=True
)
-current_site = request.META.get("HTTP_ORIGIN")
+current_site = base_host(request=request, is_app=True)

# Send invitations
for invitation in project_invitations:

@@ -432,7 +432,7 @@ class IssueViewViewSet(BaseViewSet):
):
return Response(
{"error": "You are not allowed to view this issue"},
-status=status.HTTP_400_BAD_REQUEST,
+status=status.HTTP_403_FORBIDDEN,
)

serializer = IssueViewSerializer(issue_view)

@@ -29,7 +29,7 @@ class WebhookEndpoint(BaseAPIView):
if "already exists" in str(e):
return Response(
{"error": "URL already exists for the workspace"},
-status=status.HTTP_410_GONE,
+status=status.HTTP_409_CONFLICT,
)
raise IntegrityError


@@ -119,7 +119,9 @@ class WorkSpaceViewSet(BaseViewSet):
)

# Get total members and role
-total_members=WorkspaceMember.objects.filter(workspace_id=serializer.data["id"]).count()
+total_members = WorkspaceMember.objects.filter(
+workspace_id=serializer.data["id"]
+).count()
data = serializer.data
data["total_members"] = total_members
data["role"] = 20
@@ -134,7 +136,7 @@ class WorkSpaceViewSet(BaseViewSet):
if "already exists" in str(e):
return Response(
{"slug": "The workspace with the slug already exists"},
-status=status.HTTP_410_GONE,
+status=status.HTTP_409_CONFLICT,
)

@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST], level="WORKSPACE")
@@ -167,10 +169,9 @@ class UserWorkSpacesEndpoint(BaseAPIView):
.values("count")
)

-role = (
-WorkspaceMember.objects.filter(workspace=OuterRef("id"), member=request.user, is_active=True)
-.values("role")
-)
+role = WorkspaceMember.objects.filter(
+workspace=OuterRef("id"), member=request.user, is_active=True
+).values("role")

workspace = (
Workspace.objects.prefetch_related(

@@ -36,7 +36,7 @@ from plane.db.models import (
from .. import BaseViewSet
from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.issue_filters import issue_filters

from plane.utils.host import base_host

class WorkspaceDraftIssueViewSet(BaseViewSet):
model = DraftIssue
@@ -241,7 +241,7 @@ class WorkspaceDraftIssueViewSet(BaseViewSet):
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
-origin=request.META.get("HTTP_ORIGIN"),
+origin=base_host(request=request, is_app=True),
)

if request.data.get("cycle_id", None):
@@ -270,7 +270,7 @@ class WorkspaceDraftIssueViewSet(BaseViewSet):
),
epoch=int(timezone.now().timestamp()),
notification=True,
-origin=request.META.get("HTTP_ORIGIN"),
+origin=base_host(request=request, is_app=True),
)

if request.data.get("module_ids", []):
@@ -300,7 +300,7 @@ class WorkspaceDraftIssueViewSet(BaseViewSet):
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
-origin=request.META.get("HTTP_ORIGIN"),
+origin=base_host(request=request, is_app=True),
)
for module in request.data.get("module_ids", [])
]

@@ -7,7 +7,6 @@ import jwt
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.db.models import Count
from django.utils import timezone

# Third party modules
@@ -26,7 +25,8 @@ from plane.bgtasks.event_tracking_task import workspace_invite_event
from plane.bgtasks.workspace_invitation_task import workspace_invitation
from plane.db.models import User, Workspace, WorkspaceMember, WorkspaceMemberInvite
from plane.utils.cache import invalidate_cache, invalidate_cache_directly

from plane.utils.host import base_host
from plane.utils.ip_address import get_client_ip
from .. import BaseViewSet


@@ -122,7 +122,7 @@ class WorkspaceInvitationsViewset(BaseViewSet):
workspace_invitations, batch_size=10, ignore_conflicts=True
)

-current_site = request.META.get("HTTP_ORIGIN")
+current_site = base_host(request=request, is_app=True)

# Send invitations
for invitation in workspace_invitations:
@@ -213,7 +213,7 @@ class WorkspaceJoinEndpoint(BaseAPIView):
user=user.id if user is not None else None,
email=email,
user_agent=request.META.get("HTTP_USER_AGENT"),
-ip=request.META.get("REMOTE_ADDR"),
+ip=get_client_ip(request=request),
event_name="MEMBER_ACCEPTED",
accepted_from="EMAIL",
)

@@ -15,8 +15,8 @@ from plane.db.models import Profile, User, WorkspaceMemberInvite
from plane.license.utils.instance_value import get_configuration_value
from .error import AuthenticationException, AUTHENTICATION_ERROR_CODES
from plane.bgtasks.user_activation_email_task import user_activation_email
-from plane.authentication.utils.host import base_host

+from plane.utils.host import base_host
+from plane.utils.ip_address import get_client_ip

class Adapter:
"""Common interface for all auth providers"""
@@ -108,7 +108,7 @@ class Adapter:
user.last_login_medium = self.provider
user.last_active = timezone.now()
user.last_login_time = timezone.now()
-user.last_login_ip = self.request.META.get("REMOTE_ADDR")
+user.last_login_ip = get_client_ip(request=self.request)
user.last_login_uagent = self.request.META.get("HTTP_USER_AGENT")
user.token_updated_at = timezone.now()
# If user is not active, send the activation email and set the user as active

@@ -1,18 +1,16 @@
# Python imports
from urllib.parse import urlsplit

# Django imports
from django.conf import settings
from django.http import HttpRequest
# Third party imports
from rest_framework.request import Request

# Module imports
+from plane.utils.ip_address import get_client_ip

-def base_host(request, is_admin=False, is_space=False, is_app=False):
+def base_host(request: Request | HttpRequest, is_admin: bool = False, is_space: bool = False, is_app: bool = False) -> str:
"""Utility function to return host / origin from the request"""
# Calculate the base origin from request
-base_origin = str(
-request.META.get("HTTP_ORIGIN")
-or f"{urlsplit(request.META.get('HTTP_REFERER')).scheme}://{urlsplit(request.META.get('HTTP_REFERER')).netloc}"
-or f"""{"https" if request.is_secure() else "http"}://{request.get_host()}"""
-)
+base_origin = settings.WEB_URL or settings.APP_BASE_URL

# Admin redirections
if is_admin:
@@ -38,5 +36,5 @@ def base_host(request, is_admin=False, is_space=False, is_app=False):
return base_origin


-def user_ip(request):
-return str(request.META.get("REMOTE_ADDR"))
+def user_ip(request: Request | HttpRequest) -> str:
+return get_client_ip(request=request)

@@ -3,8 +3,8 @@ from django.contrib.auth import login
from django.conf import settings

# Module imports
-from plane.authentication.utils.host import base_host

+from plane.utils.host import base_host
+from plane.utils.ip_address import get_client_ip

def user_login(request, user, is_app=False, is_admin=False, is_space=False):
login(request=request, user=user)
@@ -15,7 +15,7 @@ def user_login(request, user, is_app=False, is_admin=False, is_space=False):

device_info = {
"user_agent": request.META.get("HTTP_USER_AGENT", ""),
-"ip_address": request.META.get("REMOTE_ADDR", ""),
+"ip_address": get_client_ip(request=request),
"domain": base_host(
request=request, is_app=is_app, is_admin=is_admin, is_space=is_space
),

@@ -19,7 +19,7 @@ from plane.authentication.adapter.error import (
|
||||
AuthenticationException,
|
||||
AUTHENTICATION_ERROR_CODES,
|
||||
)
|
||||
|
||||
from plane.utils.path_validator import validate_next_path
|
||||
|
||||
class SignInAuthEndpoint(View):
|
||||
def post(self, request):
|
||||
@@ -34,7 +34,7 @@ class SignInAuthEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
# Base URL join
|
||||
url = urljoin(
|
||||
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
|
||||
@@ -58,7 +58,7 @@ class SignInAuthEndpoint(View):
|
||||
params = exc.get_error_dict()
|
||||
# Next path
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(
|
||||
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
|
||||
)
|
||||
@@ -76,7 +76,7 @@ class SignInAuthEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(
|
||||
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
|
||||
)
|
||||
@@ -92,7 +92,7 @@ class SignInAuthEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(
|
||||
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
|
||||
)
|
||||
@@ -111,7 +111,7 @@ class SignInAuthEndpoint(View):
|
||||
user_login(request=request, user=user, is_app=True)
|
||||
# Get the redirection path
|
||||
if next_path:
|
||||
path = str(next_path)
|
||||
path = str(validate_next_path(next_path))
|
||||
else:
|
||||
path = get_redirection_path(user=user)
|
||||
|
||||
@@ -121,7 +121,7 @@ class SignInAuthEndpoint(View):
|
||||
except AuthenticationException as e:
|
||||
params = e.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(
|
||||
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
|
||||
)
|
||||
@@ -141,7 +141,7 @@ class SignUpAuthEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(
|
||||
base_host(request=request, is_app=True), "?" + urlencode(params)
|
||||
)
|
||||
@@ -161,7 +161,7 @@ class SignUpAuthEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(
|
||||
base_host(request=request, is_app=True), "?" + urlencode(params)
|
||||
)
|
||||
@@ -179,7 +179,7 @@ class SignUpAuthEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(
|
||||
base_host(request=request, is_app=True), "?" + urlencode(params)
|
||||
)
|
||||
@@ -197,7 +197,7 @@ class SignUpAuthEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(
|
||||
base_host(request=request, is_app=True), "?" + urlencode(params)
|
||||
)
|
||||
@@ -216,7 +216,7 @@ class SignUpAuthEndpoint(View):
|
||||
user_login(request=request, user=user, is_app=True)
|
||||
# Get the redirection path
|
||||
if next_path:
|
||||
path = next_path
|
||||
path = str(validate_next_path(next_path))
|
||||
else:
|
||||
path = get_redirection_path(user=user)
|
||||
# redirect to referer path
|
||||
@@ -225,7 +225,7 @@ class SignUpAuthEndpoint(View):
|
||||
except AuthenticationException as e:
|
||||
params = e.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(
|
||||
base_host(request=request, is_app=True), "?" + urlencode(params)
|
||||
)
|
||||
|
||||
@@ -16,7 +16,7 @@ from plane.authentication.adapter.error import (
|
||||
AuthenticationException,
|
||||
AUTHENTICATION_ERROR_CODES,
|
||||
)
|
||||
|
||||
from plane.utils.path_validator import validate_next_path
|
||||
|
||||
class GitHubOauthInitiateEndpoint(View):
|
||||
def get(self, request):
|
||||
@@ -35,7 +35,7 @@ class GitHubOauthInitiateEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(
|
||||
base_host(request=request, is_app=True), "?" + urlencode(params)
|
||||
)
|
||||
@@ -49,7 +49,7 @@ class GitHubOauthInitiateEndpoint(View):
|
||||
except AuthenticationException as e:
|
||||
params = e.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(
|
||||
base_host(request=request, is_app=True), "?" + urlencode(params)
|
||||
)
|
||||
@@ -70,7 +70,7 @@ class GitHubCallbackEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(base_host, "?" + urlencode(params))
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -81,7 +81,7 @@ class GitHubCallbackEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(base_host, "?" + urlencode(params))
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -94,7 +94,7 @@ class GitHubCallbackEndpoint(View):
|
||||
user_login(request=request, user=user, is_app=True)
|
||||
# Get the redirection path
|
||||
if next_path:
|
||||
path = next_path
|
||||
path = str(validate_next_path(next_path))
|
||||
else:
|
||||
path = get_redirection_path(user=user)
|
||||
# redirect to referer path
|
||||
@@ -103,6 +103,6 @@ class GitHubCallbackEndpoint(View):
|
||||
except AuthenticationException as e:
|
||||
params = e.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(base_host, "?" + urlencode(params))
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -16,7 +16,7 @@ from plane.authentication.adapter.error import (
|
||||
AuthenticationException,
|
||||
AUTHENTICATION_ERROR_CODES,
|
||||
)
|
||||
|
||||
from plane.utils.path_validator import validate_next_path
|
||||
|
||||
class GitLabOauthInitiateEndpoint(View):
|
||||
def get(self, request):
|
||||
@@ -24,7 +24,7 @@ class GitLabOauthInitiateEndpoint(View):
|
||||
request.session["host"] = base_host(request=request, is_app=True)
|
||||
next_path = request.GET.get("next_path")
|
||||
if next_path:
|
||||
request.session["next_path"] = str(next_path)
|
||||
request.session["next_path"] = str(validate_next_path(next_path))
|
||||
|
||||
# Check instance configuration
|
||||
instance = Instance.objects.first()
|
||||
@@ -35,7 +35,7 @@ class GitLabOauthInitiateEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(
|
||||
base_host(request=request, is_app=True), "?" + urlencode(params)
|
||||
)
|
||||
@@ -49,7 +49,7 @@ class GitLabOauthInitiateEndpoint(View):
|
||||
except AuthenticationException as e:
|
||||
params = e.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(
|
||||
base_host(request=request, is_app=True), "?" + urlencode(params)
|
||||
)
|
||||
@@ -81,7 +81,7 @@ class GitLabCallbackEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(base_host, "?" + urlencode(params))
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -94,7 +94,7 @@ class GitLabCallbackEndpoint(View):
|
||||
user_login(request=request, user=user, is_app=True)
|
||||
# Get the redirection path
|
||||
if next_path:
|
||||
path = next_path
|
||||
path = str(validate_next_path(next_path))
|
||||
else:
|
||||
path = get_redirection_path(user=user)
|
||||
# redirect to referer path
|
||||
@@ -103,6 +103,6 @@ class GitLabCallbackEndpoint(View):
|
||||
except AuthenticationException as e:
|
||||
params = e.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(base_host, "?" + urlencode(params))
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -18,7 +18,7 @@ from plane.authentication.adapter.error import (
|
||||
AuthenticationException,
|
||||
AUTHENTICATION_ERROR_CODES,
|
||||
)
|
||||
|
||||
from plane.utils.path_validator import validate_next_path
|
||||
|
||||
class GoogleOauthInitiateEndpoint(View):
|
||||
def get(self, request):
|
||||
@@ -36,7 +36,7 @@ class GoogleOauthInitiateEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(
|
||||
base_host(request=request, is_app=True), "?" + urlencode(params)
|
||||
)
|
||||
@@ -51,7 +51,7 @@ class GoogleOauthInitiateEndpoint(View):
|
||||
except AuthenticationException as e:
|
||||
params = e.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(
|
||||
base_host(request=request, is_app=True), "?" + urlencode(params)
|
||||
)
|
||||
@@ -72,7 +72,7 @@ class GoogleCallbackEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(base_host, "?" + urlencode(params))
|
||||
return HttpResponseRedirect(url)
|
||||
if not code:
|
||||
@@ -82,7 +82,7 @@ class GoogleCallbackEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = next_path
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(base_host, "?" + urlencode(params))
|
||||
return HttpResponseRedirect(url)
|
||||
try:
|
||||
@@ -95,11 +95,11 @@ class GoogleCallbackEndpoint(View):
|
||||
# Get the redirection path
|
||||
path = get_redirection_path(user=user)
|
||||
# redirect to referer path
|
||||
url = urljoin(base_host, str(next_path) if next_path else path)
|
||||
url = urljoin(base_host, str(validate_next_path(next_path)) if next_path else path)
|
||||
return HttpResponseRedirect(url)
|
||||
except AuthenticationException as e:
|
||||
params = e.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(base_host, "?" + urlencode(params))
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -26,6 +26,7 @@ from plane.authentication.adapter.error import (
|
||||
AUTHENTICATION_ERROR_CODES,
|
||||
)
|
||||
from plane.authentication.rate_limit import AuthenticationThrottle
|
||||
from plane.utils.path_validator import validate_next_path
|
||||
|
||||
|
||||
class MagicGenerateEndpoint(APIView):
|
||||
@@ -43,14 +44,13 @@ class MagicGenerateEndpoint(APIView):
|
||||
)
|
||||
return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
origin = request.META.get("HTTP_ORIGIN", "/")
|
||||
email = request.data.get("email", "").strip().lower()
|
||||
try:
|
||||
validate_email(email)
|
||||
adapter = MagicCodeProvider(request=request, key=email)
|
||||
key, token = adapter.initiate()
|
||||
# If the smtp is configured send through here
|
||||
magic_link.delay(email, key, token, origin)
|
||||
magic_link.delay(email, key, token)
|
||||
return Response({"key": str(key)}, status=status.HTTP_200_OK)
|
||||
except AuthenticationException as e:
|
||||
params = e.get_error_dict()
|
||||
@@ -73,7 +73,7 @@ class MagicSignInEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(
|
||||
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
|
||||
)
|
||||
@@ -89,7 +89,7 @@ class MagicSignInEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(
|
||||
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
|
||||
)
|
||||
@@ -122,7 +122,7 @@ class MagicSignInEndpoint(View):
|
||||
except AuthenticationException as e:
|
||||
params = e.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(
|
||||
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
|
||||
)
|
||||
@@ -145,7 +145,7 @@ class MagicSignUpEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(
|
||||
base_host(request=request, is_app=True), "?" + urlencode(params)
|
||||
)
|
||||
@@ -159,7 +159,7 @@ class MagicSignUpEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(
|
||||
base_host(request=request, is_app=True), "?" + urlencode(params)
|
||||
)
|
||||
@@ -177,7 +177,7 @@ class MagicSignUpEndpoint(View):
|
||||
user_login(request=request, user=user, is_app=True)
|
||||
# Get the redirection path
|
||||
if next_path:
|
||||
path = str(next_path)
|
||||
path = str(validate_next_path(next_path))
|
||||
else:
|
||||
path = get_redirection_path(user=user)
|
||||
# redirect to referer path
|
||||
@@ -187,7 +187,7 @@ class MagicSignUpEndpoint(View):
|
||||
except AuthenticationException as e:
|
||||
params = e.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = urljoin(
|
||||
base_host(request=request, is_app=True), "?" + urlencode(params)
|
||||
)
|
||||
|
||||
@@ -80,7 +80,7 @@ class ForgotPasswordEndpoint(APIView):
|
||||
if user:
|
||||
# Get the reset token for user
|
||||
uidb64, token = generate_password_token(user=user)
|
||||
current_site = request.META.get("HTTP_ORIGIN")
|
||||
current_site = base_host(request=request, is_app=True)
|
||||
# send the forgot password email
|
||||
forgot_password.delay(
|
||||
user.first_name, user.email, uidb64, token, current_site
|
||||
|
||||
@@ -17,6 +17,7 @@ from plane.authentication.adapter.error import (
|
||||
AUTHENTICATION_ERROR_CODES,
|
||||
AuthenticationException,
|
||||
)
|
||||
from plane.utils.path_validator import validate_next_path
|
||||
|
||||
|
||||
class SignInAuthSpaceEndpoint(View):
|
||||
@@ -32,7 +33,7 @@ class SignInAuthSpaceEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -51,7 +52,7 @@ class SignInAuthSpaceEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -67,7 +68,7 @@ class SignInAuthSpaceEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -82,7 +83,7 @@ class SignInAuthSpaceEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -99,7 +100,7 @@ class SignInAuthSpaceEndpoint(View):
|
||||
except AuthenticationException as e:
|
||||
params = e.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -117,7 +118,7 @@ class SignUpAuthSpaceEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -135,7 +136,7 @@ class SignUpAuthSpaceEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
# Validate the email
|
||||
@@ -151,7 +152,7 @@ class SignUpAuthSpaceEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -166,7 +167,7 @@ class SignUpAuthSpaceEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -183,6 +184,6 @@ class SignUpAuthSpaceEndpoint(View):
|
||||
except AuthenticationException as e:
|
||||
params = e.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -15,6 +15,7 @@ from plane.authentication.adapter.error import (
|
||||
AUTHENTICATION_ERROR_CODES,
|
||||
AuthenticationException,
|
||||
)
|
||||
from plane.utils.path_validator import validate_next_path
|
||||
|
||||
|
||||
class GitHubOauthInitiateSpaceEndpoint(View):
|
||||
@@ -34,7 +35,7 @@ class GitHubOauthInitiateSpaceEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -66,7 +67,7 @@ class GitHubCallbackSpaceEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -77,7 +78,7 @@ class GitHubCallbackSpaceEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -93,6 +94,6 @@ class GitHubCallbackSpaceEndpoint(View):
|
||||
except AuthenticationException as e:
|
||||
params = e.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -15,6 +15,7 @@ from plane.authentication.adapter.error import (
|
||||
AUTHENTICATION_ERROR_CODES,
|
||||
AuthenticationException,
|
||||
)
|
||||
from plane.utils.path_validator import validate_next_path
|
||||
|
||||
|
||||
class GitLabOauthInitiateSpaceEndpoint(View):
|
||||
@@ -34,7 +35,7 @@ class GitLabOauthInitiateSpaceEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -66,7 +67,7 @@ class GitLabCallbackSpaceEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -77,7 +78,7 @@ class GitLabCallbackSpaceEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -93,6 +94,6 @@ class GitLabCallbackSpaceEndpoint(View):
|
||||
except AuthenticationException as e:
|
||||
params = e.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -15,6 +15,7 @@ from plane.authentication.adapter.error import (
|
||||
AuthenticationException,
|
||||
AUTHENTICATION_ERROR_CODES,
|
||||
)
|
||||
from plane.utils.path_validator import validate_next_path
|
||||
|
||||
|
||||
class GoogleOauthInitiateSpaceEndpoint(View):
|
||||
@@ -33,7 +34,7 @@ class GoogleOauthInitiateSpaceEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -46,7 +47,7 @@ class GoogleOauthInitiateSpaceEndpoint(View):
|
||||
except AuthenticationException as e:
|
||||
params = e.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -65,7 +66,7 @@ class GoogleCallbackSpaceEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
if not code:
|
||||
@@ -75,7 +76,7 @@ class GoogleCallbackSpaceEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = next_path
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
try:
|
||||
@@ -89,6 +90,6 @@ class GoogleCallbackSpaceEndpoint(View):
|
||||
except AuthenticationException as e:
|
||||
params = e.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -23,7 +23,7 @@ from plane.authentication.adapter.error import (
|
||||
AuthenticationException,
|
||||
AUTHENTICATION_ERROR_CODES,
|
||||
)
|
||||
|
||||
from plane.utils.path_validator import validate_next_path
|
||||
|
||||
class MagicGenerateSpaceEndpoint(APIView):
|
||||
permission_classes = [AllowAny]
|
||||
@@ -38,14 +38,14 @@ class MagicGenerateSpaceEndpoint(APIView):
|
||||
)
|
||||
return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
origin = base_host(request=request, is_space=True)
|
||||
|
||||
email = request.data.get("email", "").strip().lower()
|
||||
try:
|
||||
validate_email(email)
|
||||
adapter = MagicCodeProvider(request=request, key=email)
|
||||
key, token = adapter.initiate()
|
||||
# If the smtp is configured send through here
|
||||
magic_link.delay(email, key, token, origin)
|
||||
magic_link.delay(email, key, token)
|
||||
return Response({"key": str(key)}, status=status.HTTP_200_OK)
|
||||
except AuthenticationException as e:
|
||||
return Response(e.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)
|
||||
@@ -67,7 +67,7 @@ class MagicSignInSpaceEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -80,7 +80,7 @@ class MagicSignInSpaceEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -121,7 +121,7 @@ class MagicSignUpSpaceEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
# Existing User
|
||||
@@ -134,7 +134,7 @@ class MagicSignUpSpaceEndpoint(View):
|
||||
)
|
||||
params = exc.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -152,6 +152,6 @@ class MagicSignUpSpaceEndpoint(View):
|
||||
except AuthenticationException as e:
|
||||
params = e.get_error_dict()
|
||||
if next_path:
|
||||
params["next_path"] = str(next_path)
|
||||
params["next_path"] = str(validate_next_path(next_path))
|
||||
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -90,7 +90,7 @@ class ForgotPasswordSpaceEndpoint(APIView):
|
||||
if user:
|
||||
# Get the reset token for user
|
||||
uidb64, token = generate_password_token(user=user)
|
||||
current_site = request.META.get("HTTP_ORIGIN")
|
||||
current_site = base_host(request=request, is_space=True)
|
||||
# send the forgot password email
|
||||
forgot_password.delay(
|
||||
user.first_name, user.email, uidb64, token, current_site
|
||||
|
||||
@@ -7,6 +7,7 @@ from django.utils import timezone
|
||||
# Module imports
|
||||
from plane.authentication.utils.host import base_host, user_ip
|
||||
from plane.db.models import User
|
||||
from plane.utils.path_validator import validate_next_path
|
||||
|
||||
|
||||
class SignOutAuthSpaceEndpoint(View):
|
||||
@@ -21,8 +22,8 @@ class SignOutAuthSpaceEndpoint(View):
|
||||
user.save()
|
||||
# Log the user out
|
||||
logout(request)
|
||||
url = f"{base_host(request=request, is_space=True)}{next_path}"
|
||||
url = f"{base_host(request=request, is_space=True)}{str(validate_next_path(next_path)) if next_path else ''}"
|
||||
return HttpResponseRedirect(url)
|
||||
except Exception:
|
||||
url = f"{base_host(request=request, is_space=True)}{next_path}"
|
||||
url = f"{base_host(request=request, is_space=True)}{str(validate_next_path(next_path)) if next_path else ''}"
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
@@ -32,7 +32,7 @@ from plane.settings.redis import redis_instance
|
||||
from plane.utils.exception_logger import log_exception
|
||||
from plane.bgtasks.webhook_task import webhook_activity
|
||||
from plane.utils.issue_relation_mapper import get_inverse_relation
|
||||
from plane.utils.valid_uuid import is_valid_uuid
|
||||
from plane.utils.uuid import is_valid_uuid
|
||||
|
||||
|
||||
# Track Changes in name
|
||||
@@ -307,6 +307,10 @@ def track_labels(
|
||||
|
||||
# Set of newly added labels
|
||||
for added_label in added_labels:
|
||||
# validate uuids
|
||||
if not is_valid_uuid(added_label):
|
||||
continue
|
||||
|
||||
label = Label.objects.get(pk=added_label)
|
||||
issue_activities.append(
|
||||
IssueActivity(
|
||||
@@ -327,6 +331,10 @@ def track_labels(
|
||||
|
||||
# Set of dropped labels
|
||||
for dropped_label in dropped_labels:
|
||||
# validate uuids
|
||||
if not is_valid_uuid(dropped_label):
|
||||
continue
|
||||
|
||||
label = Label.objects.get(pk=dropped_label)
|
||||
issue_activities.append(
|
||||
IssueActivity(
|
||||
@@ -373,6 +381,10 @@ def track_assignees(
|
||||
|
||||
bulk_subscribers = []
|
||||
for added_asignee in added_assignees:
|
||||
# validate uuids
|
||||
if not is_valid_uuid(added_asignee):
|
||||
continue
|
||||
|
||||
assignee = User.objects.get(pk=added_asignee)
|
||||
issue_activities.append(
|
||||
IssueActivity(
|
||||
@@ -406,6 +418,10 @@ def track_assignees(
|
||||
)
|
||||
|
||||
for dropped_assignee in dropped_assginees:
|
||||
# validate uuids
|
||||
if not is_valid_uuid(dropped_assignee):
|
||||
continue
|
||||
|
||||
assignee = User.objects.get(pk=dropped_assignee)
|
||||
issue_activities.append(
|
||||
IssueActivity(
|
||||
@@ -466,7 +482,7 @@ def track_estimate_points(
|
||||
),
|
||||
old_value=old_estimate.value if old_estimate else None,
|
||||
new_value=new_estimate.value if new_estimate else None,
|
||||
field="estimate_point",
|
||||
field="estimate_" + new_estimate.estimate.type,
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
comment="updated the estimate point to ",
|
||||
|
||||
@@ -16,7 +16,7 @@ from plane.utils.exception_logger import log_exception


@shared_task
-def magic_link(email, key, token, current_site):
+def magic_link(email, key, token):
try:
(
EMAIL_HOST,

@@ -1,8 +1,16 @@
# Python imports
import os
+import logging

# Third party imports
from celery import Celery
-from plane.settings.redis import redis_instance
+from pythonjsonlogger.jsonlogger import JsonFormatter
+from celery.signals import after_setup_logger, after_setup_task_logger
from celery.schedules import crontab

+# Module imports
+from plane.settings.redis import redis_instance

# Set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")

@@ -47,6 +55,28 @@ app.conf.beat_schedule = {
},
}


+# Setup logging
+@after_setup_logger.connect
+def setup_loggers(logger, *args, **kwargs):
+formatter = JsonFormatter(
+'"%(levelname)s %(asctime)s %(module)s %(name)s %(message)s'
+)
+handler = logging.StreamHandler()
+handler.setFormatter(fmt=formatter)
+logger.addHandler(handler)


+@after_setup_task_logger.connect
+def setup_task_loggers(logger, *args, **kwargs):
+formatter = JsonFormatter(
+'"%(levelname)s %(asctime)s %(module)s %(name)s %(message)s'
+)
+handler = logging.StreamHandler()
+handler.setFormatter(fmt=formatter)
+logger.addHandler(handler)


# Load task modules from all registered Django app configs.
app.autodiscover_tasks()


@@ -0,0 +1,102 @@
|
||||
# Django imports
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.db.models import Max
|
||||
from django.db import connection, transaction
|
||||
|
||||
# Module imports
|
||||
from plane.db.models import Project, Issue, IssueSequence
|
||||
from plane.utils.uuid import convert_uuid_to_integer
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Fix duplicate sequences"
|
||||
|
||||
def add_arguments(self, parser):
|
||||
# Positional argument
|
||||
parser.add_argument("issue_identifier", type=str, help="Issue Identifier")
|
||||
|
||||
def strict_str_to_int(self, s):
|
||||
if not s.isdigit() and not (s.startswith("-") and s[1:].isdigit()):
|
||||
raise ValueError("Invalid integer string")
|
||||
return int(s)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
workspace_slug = input("Workspace slug: ")
|
||||
|
||||
if not workspace_slug:
|
||||
raise CommandError("Workspace slug is required")
|
||||
|
||||
issue_identifier = options.get("issue_identifier", False)
|
||||
|
||||
# Validate issue_identifier
|
||||
if not issue_identifier:
|
||||
raise CommandError("Issue identifier is required")
|
||||
|
||||
# Validate issue identifier
|
||||
try:
|
||||
identifier = issue_identifier.split("-")
|
||||
|
||||
if len(identifier) != 2:
|
||||
raise ValueError("Invalid issue identifier format")
|
||||
|
||||
project_identifier = identifier[0]
|
||||
issue_sequence = self.strict_str_to_int(identifier[1])
|
||||
|
||||
# Fetch the project
|
||||
project = Project.objects.get(
|
||||
identifier__iexact=project_identifier, workspace__slug=workspace_slug
|
||||
)
|
||||
|
||||
# Get the issues
|
||||
issues = Issue.objects.filter(project=project, sequence_id=issue_sequence)
|
||||
# Check if there are duplicate issues
|
||||
if not issues.count() > 1:
|
||||
raise CommandError(
|
||||
"No duplicate issues found with the given identifier"
|
||||
)
|
||||
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
f"{issues.count()} issues found with identifier {issue_identifier}"
|
||||
)
|
||||
)
|
||||
with transaction.atomic():
|
||||
# This ensures only one transaction per project can execute this code at a time
|
||||
lock_key = convert_uuid_to_integer(project.id)
|
||||
|
||||
# Acquire an exclusive lock using the project ID as the lock key
|
||||
with connection.cursor() as cursor:
|
||||
# Get an exclusive lock using the project ID as the lock key
|
||||
cursor.execute("SELECT pg_advisory_xact_lock(%s)", [lock_key])
|
||||
|
||||
# Get the maximum sequence ID for the project
|
||||
last_sequence = IssueSequence.objects.filter(project=project).aggregate(
|
||||
largest=Max("sequence")
|
||||
)["largest"]
|
||||
|
||||
bulk_issues = []
|
||||
bulk_issue_sequences = []
|
||||
|
||||
issue_sequence_map = {
|
||||
isq.issue_id: isq
|
||||
for isq in IssueSequence.objects.filter(project=project)
|
||||
}
|
||||
|
||||
# change the ids of duplicate issues
|
||||
for index, issue in enumerate(issues[1:]):
|
||||
updated_sequence_id = last_sequence + index + 1
|
||||
issue.sequence_id = updated_sequence_id
|
||||
bulk_issues.append(issue)
|
||||
|
||||
# Find the same issue sequence instance from the above queryset
|
||||
sequence_identifier = issue_sequence_map.get(issue.id)
|
||||
if sequence_identifier:
|
||||
sequence_identifier.sequence = updated_sequence_id
|
||||
bulk_issue_sequences.append(sequence_identifier)
|
||||
|
||||
Issue.objects.bulk_update(bulk_issues, ["sequence_id"])
|
||||
IssueSequence.objects.bulk_update(bulk_issue_sequences, ["sequence"])
|
||||
|
||||
self.stdout.write(self.style.SUCCESS("Sequence IDs updated successfully"))
|
||||
except Exception as e:
|
||||
raise CommandError(str(e))
|
||||
@@ -0,0 +1,78 @@
|
||||
import time
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import transaction
|
||||
from plane.db.models import Workspace
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Updates the slug of a soft-deleted workspace by appending the epoch timestamp"
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
"slug",
|
||||
type=str,
|
||||
help="The slug of the workspace to update",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--dry-run",
|
||||
action="store_true",
|
||||
help="Run the command without making any changes",
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
slug = options["slug"]
|
||||
dry_run = options["dry_run"]
|
||||
|
||||
# Get the workspace with the specified slug
|
||||
try:
|
||||
workspace = Workspace.all_objects.get(slug=slug)
|
||||
except Workspace.DoesNotExist:
|
||||
self.stdout.write(
|
||||
self.style.ERROR(f"Workspace with slug '{slug}' not found.")
|
||||
)
|
||||
return
|
||||
|
||||
# Check if the workspace is soft-deleted
|
||||
if workspace.deleted_at is None:
|
||||
self.stdout.write(
|
||||
self.style.WARNING(
|
||||
f"Workspace '{workspace.name}' (slug: {workspace.slug}) is not deleted."
|
||||
)
|
||||
)
|
||||
return
|
||||
|
||||
# Check if the slug already has a timestamp appended
|
||||
if "__" in workspace.slug and workspace.slug.split("__")[-1].isdigit():
|
||||
self.stdout.write(
|
||||
self.style.WARNING(
|
||||
f"Workspace '{workspace.name}' (slug: {workspace.slug}) already has a timestamp appended."
|
||||
)
|
||||
)
|
||||
return
|
||||
|
||||
# Get the deletion timestamp
|
||||
deletion_timestamp = int(workspace.deleted_at.timestamp())
|
||||
|
||||
# Create the new slug with the deletion timestamp
|
||||
new_slug = f"{workspace.slug}__{deletion_timestamp}"
|
||||
|
||||
if dry_run:
|
||||
self.stdout.write(
|
||||
f"Would update workspace '{workspace.name}' slug from '{workspace.slug}' to '{new_slug}'"
|
||||
)
|
||||
else:
|
||||
try:
|
||||
with transaction.atomic():
|
||||
workspace.slug = new_slug
|
||||
workspace.save(update_fields=["slug"])
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
f"Updated workspace '{workspace.name}' slug from '{workspace.slug}' to '{new_slug}'"
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
self.stdout.write(
|
||||
self.style.ERROR(
|
||||
f"Error updating workspace '{workspace.name}': {str(e)}"
|
||||
)
|
||||
)
|
||||
@@ -0,0 +1,31 @@
|
||||
# Generated by Django 4.2.17 on 2025-03-04 19:29
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("db", "0092_alter_deprecateddashboardwidget_unique_together_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="page",
|
||||
name="moved_to_page",
|
||||
field=models.UUIDField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="page",
|
||||
name="moved_to_project",
|
||||
field=models.UUIDField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="pageversion",
|
||||
name="sub_pages_data",
|
||||
field=models.JSONField(blank=True, default=dict),
|
||||
),
|
||||
]
|
||||
@@ -82,4 +82,4 @@ from .label import Label

from .device import Device, DeviceSession

-from .sticky import Sticky
+from .sticky import Sticky

@@ -31,6 +31,10 @@ class Intake(ProjectBaseModel):
ordering = ("name",)


+class SourceType(models.TextChoices):
+IN_APP = "IN_APP"


class IntakeIssue(ProjectBaseModel):
intake = models.ForeignKey(
"db.Intake", related_name="issue_intake", on_delete=models.CASCADE

@@ -6,7 +6,7 @@ from django.conf import settings
from django.contrib.postgres.fields import ArrayField
from django.core.exceptions import ValidationError
from django.core.validators import MaxValueValidator, MinValueValidator
-from django.db import models, transaction
+from django.db import models, transaction, connection
from django.utils import timezone
from django.db.models import Q
from django import apps
@@ -15,8 +15,8 @@ from django import apps
from plane.utils.html_processor import strip_tags
from plane.db.mixins import SoftDeletionManager
from plane.utils.exception_logger import log_exception
from .base import BaseModel
from .project import ProjectBaseModel
+from plane.utils.uuid import convert_uuid_to_integer


def get_default_properties():
@@ -209,11 +209,18 @@ class Issue(ProjectBaseModel):

if self._state.adding:
with transaction.atomic():
-last_sequence = (
-IssueSequence.objects.filter(project=self.project)
-.select_for_update()
-.aggregate(largest=models.Max("sequence"))["largest"]
-)
+# Create a lock for this specific project using an advisory lock
+# This ensures only one transaction per project can execute this code at a time
+lock_key = convert_uuid_to_integer(self.project.id)

+with connection.cursor() as cursor:
+# Get an exclusive lock using the project ID as the lock key
+cursor.execute("SELECT pg_advisory_xact_lock(%s)", [lock_key])

+# Get the last sequence for the project
+last_sequence = IssueSequence.objects.filter(
+project=self.project
+).aggregate(largest=models.Max("sequence"))["largest"]
self.sequence_id = last_sequence + 1 if last_sequence else 1
# Strip the html tags using html parser
self.description_stripped = (

@@ -50,6 +50,8 @@ class Page(BaseModel):
projects = models.ManyToManyField(
"db.Project", related_name="pages", through="db.ProjectPage"
)
+moved_to_page = models.UUIDField(null=True, blank=True)
+moved_to_project = models.UUIDField(null=True, blank=True)

class Meta:
verbose_name = "Page"
@@ -172,6 +174,7 @@ class PageVersion(BaseModel):
description_html = models.TextField(blank=True, default="<p></p>")
description_stripped = models.TextField(blank=True, null=True)
description_json = models.JSONField(default=dict, blank=True)
+sub_pages_data = models.JSONField(default=dict, blank=True)

class Meta:
verbose_name = "Page Version"

@@ -1,6 +1,9 @@
# Python imports
from django.db.models.functions import Ln
import pytz
import time
from django.utils import timezone
from typing import Optional, Any, Tuple, Dict

# Django imports
from django.conf import settings
@@ -149,6 +152,34 @@ class Workspace(BaseModel):
return self.logo
return None

def delete(
self,
using: Optional[str] = None,
soft: bool = True,
*args: Any,
**kwargs: Any
):
"""
Override the delete method to append epoch timestamp to the slug when soft deleting.

Args:
using: The database alias to use for the deletion.
soft: Whether to perform a soft delete (True) or hard delete (False).
*args: Additional positional arguments.
**kwargs: Additional keyword arguments.
"""
# Call the parent class's delete method first
result = super().delete(using=using, soft=soft, *args, **kwargs)

# If it's a soft delete and the model still exists (not hard deleted)
if soft and hasattr(self, 'deleted_at') and self.deleted_at:
# Use the deleted_at timestamp to update the slug
deletion_timestamp: int = int(self.deleted_at.timestamp())
self.slug = f"{self.slug}__{deletion_timestamp}"
self.save(update_fields=["slug"])

return result

class Meta:
verbose_name = "Workspace"
verbose_name_plural = "Workspaces"
@@ -391,7 +422,7 @@ class WorkspaceHomePreference(BaseModel):
class WorkspaceUserPreference(BaseModel):
"""Preference for the workspace for a user"""

class UserPreferenceKeys(models.TextChoices):
class UserPreferenceKeys(models.TextChoices):
VIEWS = "views", "Views"
ACTIVE_CYCLES = "active_cycles", "Active Cycles"
ANALYTICS = "analytics", "Analytics"
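For context (not part of the diff): the Workspace.delete override above frees the unique slug when a workspace is soft deleted, so the original slug can be reused by a new workspace. A tiny illustration with an assumed deletion time of 2025-01-01 00:00:00 UTC:

# Illustrative only — shows the slug rewrite performed above.
from datetime import datetime, timezone as dt_timezone

slug = "acme"
deleted_at = datetime(2025, 1, 1, tzinfo=dt_timezone.utc)
slug = f"{slug}__{int(deleted_at.timestamp())}"
print(slug)  # acme__1735689600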
@@ -33,6 +33,7 @@ from plane.authentication.adapter.error (
AUTHENTICATION_ERROR_CODES,
AuthenticationException,
)
from plane.utils.ip_address import get_client_ip


class InstanceAdminEndpoint(BaseAPIView):
@@ -217,7 +218,7 @@ class InstanceAdminSignUpEndpoint(View):
user.is_active = True
user.last_active = timezone.now()
user.last_login_time = timezone.now()
user.last_login_ip = request.META.get("REMOTE_ADDR")
user.last_login_ip = get_client_ip(request=request)
user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
user.token_updated_at = timezone.now()
user.save()
@@ -344,7 +345,7 @@ class InstanceAdminSignInEndpoint(View):
user.is_active = True
user.last_active = timezone.now()
user.last_login_time = timezone.now()
user.last_login_ip = request.META.get("REMOTE_ADDR")
user.last_login_ip = get_client_ip(request=request)
user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
user.token_updated_at = timezone.now()
user.save()

@@ -109,5 +109,5 @@ class InstanceWorkSpaceEndpoint(BaseAPIView):
if "already exists" in str(e):
return Response(
{"slug": "The workspace with the slug already exists"},
status=status.HTTP_410_GONE,
status=status.HTTP_409_CONFLICT,
)
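Note (not part of the diff): the implementation of get_client_ip from plane.utils.ip_address is not shown in this compare view, so the shape below is an assumption. Helpers of this kind typically prefer the X-Forwarded-For header set by the reverse proxy and fall back to REMOTE_ADDR:

# Assumed shape only — the real plane.utils.ip_address.get_client_ip may differ.
from django.http import HttpRequest


def get_client_ip(request: HttpRequest) -> str:
    forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
    if forwarded_for:
        # The left-most entry is the original client when each proxy appends its own address.
        return forwarded_for.split(",")[0].strip()
    return request.META.get("REMOTE_ADDR", "")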
@@ -1,39 +0,0 @@
|
||||
from plane.db.models import APIActivityLog
|
||||
|
||||
|
||||
class APITokenLogMiddleware:
|
||||
def __init__(self, get_response):
|
||||
self.get_response = get_response
|
||||
|
||||
def __call__(self, request):
|
||||
request_body = request.body
|
||||
response = self.get_response(request)
|
||||
self.process_request(request, response, request_body)
|
||||
return response
|
||||
|
||||
def process_request(self, request, response, request_body):
|
||||
api_key_header = "X-Api-Key"
|
||||
api_key = request.headers.get(api_key_header)
|
||||
# If the API key is present, log the request
|
||||
if api_key:
|
||||
try:
|
||||
APIActivityLog.objects.create(
|
||||
token_identifier=api_key,
|
||||
path=request.path,
|
||||
method=request.method,
|
||||
query_params=request.META.get("QUERY_STRING", ""),
|
||||
headers=str(request.headers),
|
||||
body=(request_body.decode("utf-8") if request_body else None),
|
||||
response_body=(
|
||||
response.content.decode("utf-8") if response.content else None
|
||||
),
|
||||
response_code=response.status_code,
|
||||
ip_address=request.META.get("REMOTE_ADDR", None),
|
||||
user_agent=request.META.get("HTTP_USER_AGENT", None),
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
print(e)
|
||||
# If the token does not exist, you can decide whether to log this as an invalid attempt
|
||||
|
||||
return None
|
||||
111
apiserver/plane/middleware/logger.py
Normal file
@@ -0,0 +1,111 @@
|
||||
# Python imports
|
||||
import logging
|
||||
import time
|
||||
|
||||
# Django imports
|
||||
from django.http import HttpRequest
|
||||
|
||||
# Third party imports
|
||||
from rest_framework.request import Request
|
||||
|
||||
# Module imports
|
||||
from plane.utils.ip_address import get_client_ip
|
||||
from plane.db.models import APIActivityLog
|
||||
|
||||
|
||||
api_logger = logging.getLogger("plane.api.request")
|
||||
|
||||
|
||||
class RequestLoggerMiddleware:
|
||||
def __init__(self, get_response):
|
||||
self.get_response = get_response
|
||||
|
||||
def _should_log_route(self, request: Request | HttpRequest) -> bool:
|
||||
"""
|
||||
Determines whether a route should be logged based on the request and status code.
|
||||
"""
|
||||
# Don't log health checks
|
||||
if request.path == "/" and request.method == "GET":
|
||||
return False
|
||||
return True
|
||||
|
||||
def __call__(self, request):
|
||||
# get the start time
|
||||
start_time = time.time()
|
||||
|
||||
# Get the response
|
||||
response = self.get_response(request)
|
||||
|
||||
# calculate the duration
|
||||
duration = time.time() - start_time
|
||||
|
||||
# Check if logging is required
|
||||
log_true = self._should_log_route(request=request)
|
||||
|
||||
# If logging is not required, return the response
|
||||
if not log_true:
|
||||
return response
|
||||
|
||||
user_id = (
|
||||
request.user.id
|
||||
if getattr(request, "user")
|
||||
and getattr(request.user, "is_authenticated", False)
|
||||
else None
|
||||
)
|
||||
|
||||
user_agent = request.META.get("HTTP_USER_AGENT", "")
|
||||
|
||||
# Log the request information
|
||||
api_logger.info(
|
||||
f"{request.method} {request.get_full_path()} {response.status_code}",
|
||||
extra={
|
||||
"path": request.path,
|
||||
"method": request.method,
|
||||
"status_code": response.status_code,
|
||||
"duration_ms": int(duration * 1000),
|
||||
"remote_addr": get_client_ip(request),
|
||||
"user_agent": user_agent,
|
||||
"user_id": user_id,
|
||||
},
|
||||
)
|
||||
|
||||
# return the response
|
||||
return response
|
||||
|
||||
|
||||
class APITokenLogMiddleware:
|
||||
def __init__(self, get_response):
|
||||
self.get_response = get_response
|
||||
|
||||
def __call__(self, request):
|
||||
request_body = request.body
|
||||
response = self.get_response(request)
|
||||
self.process_request(request, response, request_body)
|
||||
return response
|
||||
|
||||
def process_request(self, request, response, request_body):
|
||||
api_key_header = "X-Api-Key"
|
||||
api_key = request.headers.get(api_key_header)
|
||||
# If the API key is present, log the request
|
||||
if api_key:
|
||||
try:
|
||||
APIActivityLog.objects.create(
|
||||
token_identifier=api_key,
|
||||
path=request.path,
|
||||
method=request.method,
|
||||
query_params=request.META.get("QUERY_STRING", ""),
|
||||
headers=str(request.headers),
|
||||
body=(request_body.decode("utf-8") if request_body else None),
|
||||
response_body=(
|
||||
response.content.decode("utf-8") if response.content else None
|
||||
),
|
||||
response_code=response.status_code,
|
||||
ip_address=get_client_ip(request=request),
|
||||
user_agent=request.META.get("HTTP_USER_AGENT", None),
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
api_logger.exception(e)
|
||||
# If the token does not exist, you can decide whether to log this as an invalid attempt
|
||||
|
||||
return None
|
||||
@@ -22,7 +22,7 @@ SECRET_KEY = os.environ.get("SECRET_KEY", get_random_secret_key())
DEBUG = int(os.environ.get("DEBUG", "0"))

# Allowed Hosts
ALLOWED_HOSTS = ["*"]
ALLOWED_HOSTS = os.environ.get("ALLOWED_HOSTS", "*").split(",")

# Application definition
INSTALLED_APPS = [
@@ -58,7 +58,8 @@ MIDDLEWARE = [
"django.middleware.clickjacking.XFrameOptionsMiddleware",
"crum.CurrentRequestUserMiddleware",
"django.middleware.gzip.GZipMiddleware",
"plane.middleware.api_log_middleware.APITokenLogMiddleware",
"plane.middleware.logger.APITokenLogMiddleware",
"plane.middleware.logger.RequestLoggerMiddleware",
]

# Rest Framework settings
@@ -314,7 +315,7 @@ ADMIN_BASE_URL = os.environ.get("ADMIN_BASE_URL", None)
SPACE_BASE_URL = os.environ.get("SPACE_BASE_URL", None)
APP_BASE_URL = os.environ.get("APP_BASE_URL")
LIVE_BASE_URL = os.environ.get("LIVE_BASE_URL")

WEB_URL = os.environ.get("WEB_URL")

HARD_DELETE_AFTER_DAYS = int(os.environ.get("HARD_DELETE_AFTER_DAYS", 60))

@@ -390,4 +391,8 @@ ATTACHMENT_MIME_TYPES = [
"text/xml",
"text/csv",
"application/xml",
# SQL
"application/x-sql",
# Gzip
"application/x-gzip",
]
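A quick note on the ALLOWED_HOSTS change above (illustrative, not part of the diff): the value is read as a plain comma-separated list, so whitespace around the commas is preserved and would prevent a host match. Hostnames below are hypothetical:

# ALLOWED_HOSTS=plane.example.com,api.example.com  ->  ["plane.example.com", "api.example.com"]
import os

os.environ.setdefault("ALLOWED_HOSTS", "plane.example.com,api.example.com")
ALLOWED_HOSTS = os.environ.get("ALLOWED_HOSTS", "*").split(",")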
@@ -37,26 +37,41 @@ if not os.path.exists(LOG_DIR):
|
||||
|
||||
LOGGING = {
|
||||
"version": 1,
|
||||
"disable_existing_loggers": False,
|
||||
"disable_existing_loggers": True,
|
||||
"formatters": {
|
||||
"verbose": {
|
||||
"format": "{levelname} {asctime} {module} {process:d} {thread:d} {message}",
|
||||
"style": "{",
|
||||
}
|
||||
},
|
||||
"json": {
|
||||
"()": "pythonjsonlogger.jsonlogger.JsonFormatter",
|
||||
"fmt": "%(levelname)s %(asctime)s %(module)s %(name)s %(message)s",
|
||||
},
|
||||
},
|
||||
"handlers": {
|
||||
"console": {
|
||||
"level": "DEBUG",
|
||||
"class": "logging.StreamHandler",
|
||||
"formatter": "verbose",
|
||||
"formatter": "json",
|
||||
}
|
||||
},
|
||||
"loggers": {
|
||||
"django.request": {
|
||||
"plane.api.request": {
|
||||
"level": "INFO",
|
||||
"handlers": ["console"],
|
||||
"propagate": False,
|
||||
},
|
||||
"plane.api": {"level": "INFO", "handlers": ["console"], "propagate": False},
|
||||
"plane.worker": {"level": "INFO", "handlers": ["console"], "propagate": False},
|
||||
"plane.exception": {
|
||||
"level": "ERROR",
|
||||
"handlers": ["console"],
|
||||
"propagate": False,
|
||||
},
|
||||
"plane.external": {
|
||||
"level": "INFO",
|
||||
"handlers": ["console"],
|
||||
"level": "DEBUG",
|
||||
"propagate": False,
|
||||
},
|
||||
"plane": {"handlers": ["console"], "level": "DEBUG", "propagate": False},
|
||||
},
|
||||
}
|
||||
|
||||
@@ -26,11 +26,10 @@ if not os.path.exists(LOG_DIR):
|
||||
# Logging configuration
|
||||
LOGGING = {
|
||||
"version": 1,
|
||||
"disable_existing_loggers": False,
|
||||
"disable_existing_loggers": True,
|
||||
"formatters": {
|
||||
"verbose": {
|
||||
"format": "{levelname} {asctime} {module} {process:d} {thread:d} {message}",
|
||||
"style": "{",
|
||||
"format": "%(asctime)s [%(process)d] %(levelname)s %(name)s: %(message)s"
|
||||
},
|
||||
"json": {
|
||||
"()": "pythonjsonlogger.jsonlogger.JsonFormatter",
|
||||
@@ -40,7 +39,7 @@ LOGGING = {
|
||||
"handlers": {
|
||||
"console": {
|
||||
"class": "logging.StreamHandler",
|
||||
"formatter": "verbose",
|
||||
"formatter": "json",
|
||||
"level": "INFO",
|
||||
},
|
||||
"file": {
|
||||
@@ -59,16 +58,30 @@ LOGGING = {
|
||||
},
|
||||
},
|
||||
"loggers": {
|
||||
"django": {"handlers": ["console", "file"], "level": "INFO", "propagate": True},
|
||||
"django.request": {
|
||||
"handlers": ["console", "file"],
|
||||
"level": "INFO",
|
||||
"plane.api.request": {
|
||||
"level": "DEBUG" if DEBUG else "INFO",
|
||||
"handlers": ["console"],
|
||||
"propagate": False,
|
||||
},
|
||||
"plane": {
|
||||
"plane.api": {
|
||||
"level": "DEBUG" if DEBUG else "INFO",
|
||||
"handlers": ["console"],
|
||||
"propagate": False,
|
||||
},
|
||||
"plane.worker": {
|
||||
"level": "DEBUG" if DEBUG else "INFO",
|
||||
"handlers": ["console"],
|
||||
"propagate": False,
|
||||
},
|
||||
"plane.exception": {
|
||||
"level": "DEBUG" if DEBUG else "ERROR",
|
||||
"handlers": ["console", "file"],
|
||||
"propagate": False,
|
||||
},
|
||||
"plane.external": {
|
||||
"level": "INFO",
|
||||
"handlers": ["console"],
|
||||
"propagate": False,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
@@ -3,6 +3,9 @@ from django.contrib.postgres.aggregates import ArrayAgg
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db.models import Q, UUIDField, Value, F, Case, When, JSONField, CharField
|
||||
from django.db.models.functions import Coalesce, JSONObject, Concat
|
||||
from django.db.models import QuerySet
|
||||
|
||||
from typing import List, Optional, Dict, Any, Union
|
||||
|
||||
# Module imports
|
||||
from plane.db.models import (
|
||||
@@ -17,13 +20,25 @@ from plane.db.models import (
|
||||
)
|
||||
|
||||
|
||||
def issue_queryset_grouper(queryset, group_by, sub_group_by):
|
||||
def issue_queryset_grouper(
|
||||
queryset: QuerySet[Issue], group_by: Optional[str], sub_group_by: Optional[str]
|
||||
) -> QuerySet[Issue]:
|
||||
FIELD_MAPPER = {
|
||||
"label_ids": "labels__id",
|
||||
"assignee_ids": "assignees__id",
|
||||
"module_ids": "issue_module__module_id",
|
||||
}
|
||||
|
||||
GROUP_FILTER_MAPPER = {
|
||||
"assignees__id": Q(issue_assignee__deleted_at__isnull=True),
|
||||
"labels__id": Q(label_issue__deleted_at__isnull=True),
|
||||
"issue_module__module_id": Q(issue_module__deleted_at__isnull=True),
|
||||
}
|
||||
|
||||
for group_key in [group_by, sub_group_by]:
|
||||
if group_key in GROUP_FILTER_MAPPER:
|
||||
queryset = queryset.filter(GROUP_FILTER_MAPPER[group_key])
|
||||
|
||||
annotations_map = {
|
||||
"assignee_ids": (
|
||||
"assignees__id",
|
||||
@@ -50,7 +65,9 @@ def issue_queryset_grouper(queryset, group_by, sub_group_by):
|
||||
return queryset.annotate(**default_annotations)
|
||||
|
||||
|
||||
def issue_on_results(issues, group_by, sub_group_by):
|
||||
def issue_on_results(
|
||||
issues: QuerySet[Issue], group_by: Optional[str], sub_group_by: Optional[str]
|
||||
) -> List[Dict[str, Any]]:
|
||||
FIELD_MAPPER = {
|
||||
"labels__id": "label_ids",
|
||||
"assignees__id": "assignee_ids",
|
||||
@@ -160,7 +177,12 @@ def issue_on_results(issues, group_by, sub_group_by):
|
||||
return issues
|
||||
|
||||
|
||||
def issue_group_values(field, slug, project_id=None, filters=dict):
|
||||
def issue_group_values(
|
||||
field: str,
|
||||
slug: str,
|
||||
project_id: Optional[str] = None,
|
||||
filters: Dict[str, Any] = {},
|
||||
) -> List[Union[str, Any]]:
|
||||
if field == "state_id":
|
||||
queryset = State.objects.filter(
|
||||
is_triage=False, workspace__slug=slug
|
||||
|
||||
@@ -96,7 +96,7 @@ class EntityAssetEndpoint(BaseAPIView):
if type not in allowed_types:
return Response(
{
"error": "Invalid file type. Only JPEG and PNG files are allowed.",
"error": "Invalid file type. Only JPEG, PNG, WebP, JPG and GIF files are allowed.",
"status": False,
},
status=status.HTTP_400_BAD_REQUEST,

@@ -41,4 +41,4 @@ class WorkSpaceCreateReadUpdateDelete(AuthenticatedAPITest):
response = self.client.post(
url, {"name": "Plane", "slug": "pla-ne"}, format="json"
)
self.assertEqual(response.status_code, status.HTTP_410_GONE)
self.assertEqual(response.status_code, status.HTTP_409_CONFLICT)

@@ -8,8 +8,8 @@ from django.conf import settings

def log_exception(e):
# Log the error
logger = logging.getLogger("plane")
logger.error(e)
logger = logging.getLogger("plane.exception")
logger.exception(e)

if settings.DEBUG:
# Print the traceback if in debug mode
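For context (not part of the diff): switching log_exception from logger.error(e) to logger.exception(e) records the full traceback of the active exception, not just its message, on the dedicated "plane.exception" logger. A minimal sketch:

# Illustrative only — logger.exception should be called from inside an except block.
import logging

logger = logging.getLogger("plane.exception")

try:
    1 / 0
except ZeroDivisionError as e:
    logger.exception(e)  # logs the message plus the traceback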
@@ -1,7 +1,7 @@
|
||||
# Django imports
|
||||
from django.contrib.postgres.aggregates import ArrayAgg
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db.models import Q, UUIDField, Value
|
||||
from django.db.models import Q, UUIDField, Value, QuerySet
|
||||
from django.db.models.functions import Coalesce
|
||||
|
||||
# Module imports
|
||||
@@ -15,16 +15,31 @@ from plane.db.models import (
|
||||
State,
|
||||
WorkspaceMember,
|
||||
)
|
||||
from typing import Optional, Dict, Tuple, Any, Union, List
|
||||
|
||||
|
||||
def issue_queryset_grouper(queryset, group_by, sub_group_by):
|
||||
FIELD_MAPPER = {
|
||||
def issue_queryset_grouper(
|
||||
queryset: QuerySet[Issue],
|
||||
group_by: Optional[str],
|
||||
sub_group_by: Optional[str],
|
||||
) -> QuerySet[Issue]:
|
||||
FIELD_MAPPER: Dict[str, str] = {
|
||||
"label_ids": "labels__id",
|
||||
"assignee_ids": "assignees__id",
|
||||
"module_ids": "issue_module__module_id",
|
||||
}
|
||||
|
||||
annotations_map = {
|
||||
GROUP_FILTER_MAPPER: Dict[str, Q] = {
|
||||
"assignees__id": Q(issue_assignee__deleted_at__isnull=True),
|
||||
"labels__id": Q(label_issue__deleted_at__isnull=True),
|
||||
"issue_module__module_id": Q(issue_module__deleted_at__isnull=True),
|
||||
}
|
||||
|
||||
for group_key in [group_by, sub_group_by]:
|
||||
if group_key in GROUP_FILTER_MAPPER:
|
||||
queryset = queryset.filter(GROUP_FILTER_MAPPER[group_key])
|
||||
|
||||
annotations_map: Dict[str, Tuple[str, Q]] = {
|
||||
"assignee_ids": (
|
||||
"assignees__id",
|
||||
~Q(assignees__id__isnull=True) & Q(issue_assignee__deleted_at__isnull=True),
|
||||
@@ -42,7 +57,8 @@ def issue_queryset_grouper(queryset, group_by, sub_group_by):
|
||||
),
|
||||
),
|
||||
}
|
||||
default_annotations = {
|
||||
|
||||
default_annotations: Dict[str, Any] = {
|
||||
key: Coalesce(
|
||||
ArrayAgg(field, distinct=True, filter=condition),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
@@ -54,16 +70,20 @@ def issue_queryset_grouper(queryset, group_by, sub_group_by):
|
||||
return queryset.annotate(**default_annotations)
|
||||
|
||||
|
||||
def issue_on_results(issues, group_by, sub_group_by):
|
||||
FIELD_MAPPER = {
|
||||
def issue_on_results(
|
||||
issues: QuerySet[Issue],
|
||||
group_by: Optional[str],
|
||||
sub_group_by: Optional[str],
|
||||
) -> List[Dict[str, Any]]:
|
||||
FIELD_MAPPER: Dict[str, str] = {
|
||||
"labels__id": "label_ids",
|
||||
"assignees__id": "assignee_ids",
|
||||
"issue_module__module_id": "module_ids",
|
||||
}
|
||||
|
||||
original_list = ["assignee_ids", "label_ids", "module_ids"]
|
||||
original_list: List[str] = ["assignee_ids", "label_ids", "module_ids"]
|
||||
|
||||
required_fields = [
|
||||
required_fields: List[str] = [
|
||||
"id",
|
||||
"name",
|
||||
"state_id",
|
||||
@@ -98,62 +118,72 @@ def issue_on_results(issues, group_by, sub_group_by):
|
||||
original_list.append(sub_group_by)
|
||||
|
||||
required_fields.extend(original_list)
|
||||
return issues.values(*required_fields)
|
||||
return list(issues.values(*required_fields))
|
||||
|
||||
|
||||
def issue_group_values(field, slug, project_id=None, filters=dict):
|
||||
def issue_group_values(
|
||||
field: str,
|
||||
slug: str,
|
||||
project_id: Optional[str] = None,
|
||||
filters: Dict[str, Any] = {},
|
||||
) -> List[Union[str, Any]]:
|
||||
if field == "state_id":
|
||||
queryset = State.objects.filter(
|
||||
is_triage=False, workspace__slug=slug
|
||||
).values_list("id", flat=True)
|
||||
if project_id:
|
||||
return list(queryset.filter(project_id=project_id))
|
||||
else:
|
||||
return list(queryset)
|
||||
return list(queryset)
|
||||
|
||||
if field == "labels__id":
|
||||
queryset = Label.objects.filter(workspace__slug=slug).values_list(
|
||||
"id", flat=True
|
||||
)
|
||||
if project_id:
|
||||
return list(queryset.filter(project_id=project_id)) + ["None"]
|
||||
else:
|
||||
return list(queryset) + ["None"]
|
||||
return list(queryset) + ["None"]
|
||||
|
||||
if field == "assignees__id":
|
||||
if project_id:
|
||||
return ProjectMember.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id, is_active=True
|
||||
).values_list("member_id", flat=True)
|
||||
else:
|
||||
return list(
|
||||
WorkspaceMember.objects.filter(
|
||||
workspace__slug=slug, is_active=True
|
||||
ProjectMember.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id, is_active=True
|
||||
).values_list("member_id", flat=True)
|
||||
)
|
||||
return list(
|
||||
WorkspaceMember.objects.filter(
|
||||
workspace__slug=slug, is_active=True
|
||||
).values_list("member_id", flat=True)
|
||||
)
|
||||
|
||||
if field == "issue_module__module_id":
|
||||
queryset = Module.objects.filter(workspace__slug=slug).values_list(
|
||||
"id", flat=True
|
||||
)
|
||||
if project_id:
|
||||
return list(queryset.filter(project_id=project_id)) + ["None"]
|
||||
else:
|
||||
return list(queryset) + ["None"]
|
||||
return list(queryset) + ["None"]
|
||||
|
||||
if field == "cycle_id":
|
||||
queryset = Cycle.objects.filter(workspace__slug=slug).values_list(
|
||||
"id", flat=True
|
||||
)
|
||||
if project_id:
|
||||
return list(queryset.filter(project_id=project_id)) + ["None"]
|
||||
else:
|
||||
return list(queryset) + ["None"]
|
||||
return list(queryset) + ["None"]
|
||||
|
||||
if field == "project_id":
|
||||
queryset = Project.objects.filter(workspace__slug=slug).values_list(
|
||||
"id", flat=True
|
||||
)
|
||||
return list(queryset)
|
||||
|
||||
if field == "priority":
|
||||
return ["low", "medium", "high", "urgent", "none"]
|
||||
|
||||
if field == "state__group":
|
||||
return ["backlog", "unstarted", "started", "completed", "cancelled"]
|
||||
|
||||
if field == "target_date":
|
||||
queryset = (
|
||||
Issue.issue_objects.filter(workspace__slug=slug)
|
||||
@@ -163,8 +193,8 @@ def issue_group_values(field, slug, project_id=None, filters=dict):
|
||||
)
|
||||
if project_id:
|
||||
return list(queryset.filter(project_id=project_id))
|
||||
else:
|
||||
return list(queryset)
|
||||
return list(queryset)
|
||||
|
||||
if field == "start_date":
|
||||
queryset = (
|
||||
Issue.issue_objects.filter(workspace__slug=slug)
|
||||
@@ -174,8 +204,7 @@ def issue_group_values(field, slug, project_id=None, filters=dict):
|
||||
)
|
||||
if project_id:
|
||||
return list(queryset.filter(project_id=project_id))
|
||||
else:
|
||||
return list(queryset)
|
||||
return list(queryset)
|
||||
|
||||
if field == "created_by":
|
||||
queryset = (
|
||||
@@ -186,7 +215,6 @@ def issue_group_values(field, slug, project_id=None, filters=dict):
|
||||
)
|
||||
if project_id:
|
||||
return list(queryset.filter(project_id=project_id))
|
||||
else:
|
||||
return list(queryset)
|
||||
return list(queryset)
|
||||
|
||||
return []
|
||||
|
||||
@@ -1,18 +1,21 @@
# Python imports
from urllib.parse import urlsplit

# Django imports
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpRequest

# Third party imports
from rest_framework.request import Request

def base_host(request, is_admin=False, is_space=False, is_app=False):
# Module imports
from plane.utils.ip_address import get_client_ip

def base_host(request: Request | HttpRequest, is_admin: bool = False, is_space: bool = False, is_app: bool = False) -> str:
"""Utility function to return host / origin from the request"""
# Calculate the base origin from request
base_origin = str(
request.META.get("HTTP_ORIGIN")
or f"{urlsplit(request.META.get('HTTP_REFERER')).scheme}://{urlsplit(request.META.get('HTTP_REFERER')).netloc}"
or f"""{"https" if request.is_secure() else "http"}://{request.get_host()}"""
)
base_origin = settings.WEB_URL or settings.APP_BASE_URL

if not base_origin:
raise ImproperlyConfigured("APP_BASE_URL or WEB_URL is not set")

# Admin redirections
if is_admin:
@@ -38,5 +41,5 @@ def base_host(request, is_admin=False, is_space=False, is_app=False):
return base_origin


def user_ip(request):
return str(request.META.get("REMOTE_ADDR"))
def user_ip(request: Request | HttpRequest) -> str:
return get_client_ip(request=request)
21
apiserver/plane/utils/path_validator.py
Normal file
@@ -0,0 +1,21 @@
# Python imports
from urllib.parse import urlparse


def validate_next_path(next_path: str) -> str:
    """Validates that next_path is a valid path and extracts only the path component."""
    parsed_url = urlparse(next_path)

    # Ensure next_path is not an absolute URL
    if parsed_url.scheme or parsed_url.netloc:
        next_path = parsed_url.path  # Extract only the path component

    # Ensure it starts with a forward slash (indicating a valid relative path)
    if not next_path.startswith("/"):
        return ""

    # Ensure it does not contain dangerous path traversal sequences
    if ".." in next_path:
        return ""

    return next_path
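A few illustrative calls (not part of the diff) showing how validate_next_path reduces absolute URLs to their path and rejects unsafe values; the example inputs are hypothetical:

# Expected behaviour of the helper defined above.
from plane.utils.path_validator import validate_next_path

assert validate_next_path("/workspaces/acme/projects") == "/workspaces/acme/projects"
assert validate_next_path("https://evil.example.com/phish") == "/phish"  # only the path survives
assert validate_next_path("/workspaces/../admin") == ""  # traversal sequences rejected
assert validate_next_path("relative/path") == ""  # must start with "/"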
22
apiserver/plane/utils/uuid.py
Normal file
@@ -0,0 +1,22 @@
# Python imports
import uuid
import hashlib


def is_valid_uuid(uuid_str):
    """Check if a string is a valid UUID version 4"""
    try:
        uuid_obj = uuid.UUID(uuid_str)
        return uuid_obj.version == 4
    except ValueError:
        return False


def convert_uuid_to_integer(uuid_val: uuid.UUID) -> int:
    """Convert a UUID to a 64-bit signed integer"""
    # Ensure UUID is a string
    uuid_value: str = str(uuid_val)
    # Hash to 64-bit signed int
    h: bytes = hashlib.sha256(uuid_value.encode()).digest()
    bigint: int = int.from_bytes(h[:8], byteorder="big", signed=True)
    return bigint
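Illustrative usage (not part of the diff): the conversion is deterministic, so every process computes the same advisory-lock key for a given project, and the result always fits a signed 64-bit lock key. The UUID below is arbitrary:

# Example only.
import uuid

from plane.utils.uuid import convert_uuid_to_integer

project_id = uuid.UUID("12345678-1234-5678-1234-567812345678")
key = convert_uuid_to_integer(project_id)
assert key == convert_uuid_to_integer(project_id)  # stable across processes
assert -(2**63) <= key < 2**63  # within Postgres bigint range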
@@ -1,8 +0,0 @@
import uuid

def is_valid_uuid(uuid_str):
    try:
        uuid.UUID(uuid_str, version=4)
        return True
    except ValueError:
        return False
@@ -43,7 +43,7 @@ scout-apm==3.1.0
# xlsx generation
openpyxl==3.1.2
# logging
python-json-logger==2.0.7
python-json-logger==3.3.0
# html parser
beautifulsoup4==4.12.3
# analytics
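For context (not part of the diff): the bump to python-json-logger 3.x keeps the `pythonjsonlogger.jsonlogger.JsonFormatter` path referenced by the logging settings above working as a backwards-compatible import. A minimal standalone sketch of that formatter wiring, mirroring the settings' configuration:

# Illustrative sketch of JSON console logging.
import logging
import logging.config

logging.config.dictConfig(
    {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "json": {
                "()": "pythonjsonlogger.jsonlogger.JsonFormatter",
                "fmt": "%(levelname)s %(asctime)s %(name)s %(message)s",
            }
        },
        "handlers": {
            "console": {"class": "logging.StreamHandler", "formatter": "json", "level": "INFO"}
        },
        "loggers": {
            "plane.api.request": {"handlers": ["console"], "level": "INFO", "propagate": False}
        },
    }
)

logging.getLogger("plane.api.request").info("GET /api/workspaces/ 200", extra={"duration_ms": 12})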
@@ -53,7 +53,7 @@ x-app-env: &app-env
|
||||
|
||||
services:
|
||||
web:
|
||||
image: ${DOCKERHUB_USER:-makeplane}/plane-frontend:${APP_RELEASE:-stable}
|
||||
image: artifacts.plane.so/makeplane/plane-frontend:${APP_RELEASE:-stable}
|
||||
command: node web/server.js web
|
||||
deploy:
|
||||
replicas: ${WEB_REPLICAS:-1}
|
||||
@@ -64,7 +64,7 @@ services:
|
||||
- worker
|
||||
|
||||
space:
|
||||
image: ${DOCKERHUB_USER:-makeplane}/plane-space:${APP_RELEASE:-stable}
|
||||
image: artifacts.plane.so/makeplane/plane-space:${APP_RELEASE:-stable}
|
||||
command: node space/server.js space
|
||||
deploy:
|
||||
replicas: ${SPACE_REPLICAS:-1}
|
||||
@@ -76,7 +76,7 @@ services:
|
||||
- web
|
||||
|
||||
admin:
|
||||
image: ${DOCKERHUB_USER:-makeplane}/plane-admin:${APP_RELEASE:-stable}
|
||||
image: artifacts.plane.so/makeplane/plane-admin:${APP_RELEASE:-stable}
|
||||
command: node admin/server.js admin
|
||||
deploy:
|
||||
replicas: ${ADMIN_REPLICAS:-1}
|
||||
@@ -87,7 +87,7 @@ services:
|
||||
- web
|
||||
|
||||
live:
|
||||
image: ${DOCKERHUB_USER:-makeplane}/plane-live:${APP_RELEASE:-stable}
|
||||
image: artifacts.plane.so/makeplane/plane-live:${APP_RELEASE:-stable}
|
||||
command: node live/dist/server.js live
|
||||
environment:
|
||||
<<: [*live-env]
|
||||
@@ -100,7 +100,7 @@ services:
|
||||
- web
|
||||
|
||||
api:
|
||||
image: ${DOCKERHUB_USER:-makeplane}/plane-backend:${APP_RELEASE:-stable}
|
||||
image: artifacts.plane.so/makeplane/plane-backend:${APP_RELEASE:-stable}
|
||||
command: ./bin/docker-entrypoint-api.sh
|
||||
deploy:
|
||||
replicas: ${API_REPLICAS:-1}
|
||||
@@ -116,7 +116,7 @@ services:
|
||||
- plane-mq
|
||||
|
||||
worker:
|
||||
image: ${DOCKERHUB_USER:-makeplane}/plane-backend:${APP_RELEASE:-stable}
|
||||
image: artifacts.plane.so/makeplane/plane-backend:${APP_RELEASE:-stable}
|
||||
command: ./bin/docker-entrypoint-worker.sh
|
||||
deploy:
|
||||
replicas: ${WORKER_REPLICAS:-1}
|
||||
@@ -133,7 +133,7 @@ services:
|
||||
- plane-mq
|
||||
|
||||
beat-worker:
|
||||
image: ${DOCKERHUB_USER:-makeplane}/plane-backend:${APP_RELEASE:-stable}
|
||||
image: artifacts.plane.so/makeplane/plane-backend:${APP_RELEASE:-stable}
|
||||
command: ./bin/docker-entrypoint-beat.sh
|
||||
deploy:
|
||||
replicas: ${BEAT_WORKER_REPLICAS:-1}
|
||||
@@ -150,7 +150,7 @@ services:
|
||||
- plane-mq
|
||||
|
||||
migrator:
|
||||
image: ${DOCKERHUB_USER:-makeplane}/plane-backend:${APP_RELEASE:-stable}
|
||||
image: artifacts.plane.so/makeplane/plane-backend:${APP_RELEASE:-stable}
|
||||
command: ./bin/docker-entrypoint-migrator.sh
|
||||
deploy:
|
||||
replicas: 1
|
||||
@@ -212,7 +212,7 @@ services:
|
||||
|
||||
# Comment this if you already have a reverse proxy running
|
||||
proxy:
|
||||
image: ${DOCKERHUB_USER:-makeplane}/plane-proxy:${APP_RELEASE:-stable}
|
||||
image: artifacts.plane.so/makeplane/plane-proxy:${APP_RELEASE:-stable}
|
||||
ports:
|
||||
- target: 80
|
||||
published: ${NGINX_PORT:-80}
|
||||
|
||||
@@ -5,7 +5,7 @@ SCRIPT_DIR=$PWD
|
||||
SERVICE_FOLDER=plane-app
|
||||
PLANE_INSTALL_DIR=$PWD/$SERVICE_FOLDER
|
||||
export APP_RELEASE=stable
|
||||
export DOCKERHUB_USER=makeplane
|
||||
export DOCKERHUB_USER=artifacts.plane.so/makeplane
|
||||
export PULL_POLICY=${PULL_POLICY:-if_not_present}
|
||||
export GH_REPO=makeplane/plane
|
||||
export RELEASE_DOWNLOAD_URL="https://github.com/$GH_REPO/releases/download"
|
||||
@@ -631,7 +631,7 @@ if [ -f "$DOCKER_ENV_PATH" ]; then
|
||||
CUSTOM_BUILD=$(getEnvValue "CUSTOM_BUILD" "$DOCKER_ENV_PATH")
|
||||
|
||||
if [ -z "$DOCKERHUB_USER" ]; then
|
||||
DOCKERHUB_USER=makeplane
|
||||
DOCKERHUB_USER=artifacts.plane.so/makeplane
|
||||
updateEnvFile "DOCKERHUB_USER" "$DOCKERHUB_USER" "$DOCKER_ENV_PATH"
|
||||
fi
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ SERVICE_FOLDER=plane-app
|
||||
SCRIPT_DIR=$PWD
|
||||
PLANE_INSTALL_DIR=$PWD/$SERVICE_FOLDER
|
||||
export APP_RELEASE="stable"
|
||||
export DOCKERHUB_USER=makeplane
|
||||
export DOCKERHUB_USER=artifacts.plane.so/makeplane
|
||||
|
||||
export GH_REPO=makeplane/plane
|
||||
export RELEASE_DOWNLOAD_URL="https://github.com/$GH_REPO/releases/download"
|
||||
@@ -596,7 +596,7 @@ if [ -f "$DOCKER_ENV_PATH" ]; then
|
||||
APP_RELEASE=$(getEnvValue "APP_RELEASE" "$DOCKER_ENV_PATH")
|
||||
|
||||
if [ -z "$DOCKERHUB_USER" ]; then
|
||||
DOCKERHUB_USER=makeplane
|
||||
DOCKERHUB_USER=artifacts.plane.so/makeplane
|
||||
updateEnvFile "DOCKERHUB_USER" "$DOCKERHUB_USER" "$DOCKER_ENV_PATH"
|
||||
fi
|
||||
|
||||
|
||||
@@ -60,4 +60,4 @@ GUNICORN_WORKERS=1
|
||||
MINIO_ENDPOINT_SSL=0
|
||||
|
||||
# API key rate limit
|
||||
API_KEY_RATE_LIMIT="60/minute"
|
||||
API_KEY_RATE_LIMIT=60/minute
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "live",
|
||||
"version": "0.25.3",
|
||||
"version": "0.26.0",
|
||||
"license": "AGPL-3.0",
|
||||
"description": "A realtime collaborative server powers Plane's rich text editor",
|
||||
"main": "./src/server.ts",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"name": "plane",
|
||||
"description": "Open-source project management that unlocks customer value",
|
||||
"repository": "https://github.com/makeplane/plane.git",
|
||||
"version": "0.25.3",
|
||||
"version": "0.26.0",
|
||||
"license": "AGPL-3.0",
|
||||
"private": true,
|
||||
"workspaces": [
|
||||
@@ -24,7 +24,7 @@
|
||||
"devDependencies": {
|
||||
"prettier": "latest",
|
||||
"prettier-plugin-tailwindcss": "^0.5.4",
|
||||
"turbo": "^2.4.2"
|
||||
"turbo": "^2.5.0"
|
||||
},
|
||||
"resolutions": {
|
||||
"nanoid": "3.3.8",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@plane/constants",
|
||||
"version": "0.25.3",
|
||||
"version": "0.26.0",
|
||||
"private": true,
|
||||
"main": "./src/index.ts",
|
||||
"license": "AGPL-3.0"
|
||||
|
||||
@@ -8,3 +8,18 @@ export const ISSUE_REACTION_EMOJI_CODES = [
|
||||
"9992",
|
||||
"128064",
|
||||
];
|
||||
|
||||
export const RANDOM_EMOJI_CODES = [
|
||||
"8986",
|
||||
"9200",
|
||||
"128204",
|
||||
"127773",
|
||||
"127891",
|
||||
"128076",
|
||||
"128077",
|
||||
"128187",
|
||||
"128188",
|
||||
"128512",
|
||||
"128522",
|
||||
"128578",
|
||||
];
|
||||
|
||||
@@ -1,91 +1,97 @@
|
||||
import { TInboxDuplicateIssueDetails, TIssue } from "@plane/types";
|
||||
|
||||
export enum EInboxIssueCurrentTab {
|
||||
OPEN = "open",
|
||||
CLOSED = "closed",
|
||||
OPEN = "open",
|
||||
CLOSED = "closed",
|
||||
}
|
||||
|
||||
export enum EInboxIssueStatus {
|
||||
PENDING = -2,
|
||||
DECLINED = -1,
|
||||
SNOOZED = 0,
|
||||
ACCEPTED = 1,
|
||||
DUPLICATE = 2,
|
||||
PENDING = -2,
|
||||
DECLINED = -1,
|
||||
SNOOZED = 0,
|
||||
ACCEPTED = 1,
|
||||
DUPLICATE = 2,
|
||||
}
|
||||
|
||||
export enum EInboxIssueSource {
|
||||
IN_APP = "IN_APP",
|
||||
FORMS = "FORMS",
|
||||
EMAIL = "EMAIL",
|
||||
}
|
||||
|
||||
export type TInboxIssueCurrentTab = EInboxIssueCurrentTab;
|
||||
export type TInboxIssueStatus = EInboxIssueStatus;
|
||||
export type TInboxIssue = {
|
||||
id: string;
|
||||
status: TInboxIssueStatus;
|
||||
snoozed_till: Date | null;
|
||||
duplicate_to: string | undefined;
|
||||
source: string;
|
||||
issue: TIssue;
|
||||
created_by: string;
|
||||
duplicate_issue_detail: TInboxDuplicateIssueDetails | undefined;
|
||||
id: string;
|
||||
status: TInboxIssueStatus;
|
||||
snoozed_till: Date | null;
|
||||
duplicate_to: string | undefined;
|
||||
source: EInboxIssueSource | undefined;
|
||||
issue: TIssue;
|
||||
created_by: string;
|
||||
duplicate_issue_detail: TInboxDuplicateIssueDetails | undefined;
|
||||
};
|
||||
|
||||
export const INBOX_STATUS: {
|
||||
key: string;
|
||||
status: TInboxIssueStatus;
|
||||
i18n_title: string;
|
||||
i18n_description: () => string;
|
||||
key: string;
|
||||
status: TInboxIssueStatus;
|
||||
i18n_title: string;
|
||||
i18n_description: () => string;
|
||||
}[] = [
|
||||
{
|
||||
key: "pending",
|
||||
i18n_title: "inbox_issue.status.pending.title",
|
||||
status: EInboxIssueStatus.PENDING,
|
||||
i18n_description: () => `inbox_issue.status.pending.description`,
|
||||
},
|
||||
{
|
||||
key: "declined",
|
||||
i18n_title: "inbox_issue.status.declined.title",
|
||||
status: EInboxIssueStatus.DECLINED,
|
||||
i18n_description: () => `inbox_issue.status.declined.description`,
|
||||
},
|
||||
{
|
||||
key: "snoozed",
|
||||
i18n_title: "inbox_issue.status.snoozed.title",
|
||||
status: EInboxIssueStatus.SNOOZED,
|
||||
i18n_description: () => `inbox_issue.status.snoozed.description`,
|
||||
},
|
||||
{
|
||||
key: "accepted",
|
||||
i18n_title: "inbox_issue.status.accepted.title",
|
||||
status: EInboxIssueStatus.ACCEPTED,
|
||||
i18n_description: () => `inbox_issue.status.accepted.description`,
|
||||
},
|
||||
{
|
||||
key: "duplicate",
|
||||
i18n_title: "inbox_issue.status.duplicate.title",
|
||||
status: EInboxIssueStatus.DUPLICATE,
|
||||
i18n_description: () => `inbox_issue.status.duplicate.description`,
|
||||
},
|
||||
{
|
||||
key: "pending",
|
||||
i18n_title: "inbox_issue.status.pending.title",
|
||||
status: EInboxIssueStatus.PENDING,
|
||||
i18n_description: () => `inbox_issue.status.pending.description`,
|
||||
},
|
||||
{
|
||||
key: "declined",
|
||||
i18n_title: "inbox_issue.status.declined.title",
|
||||
status: EInboxIssueStatus.DECLINED,
|
||||
i18n_description: () => `inbox_issue.status.declined.description`,
|
||||
},
|
||||
{
|
||||
key: "snoozed",
|
||||
i18n_title: "inbox_issue.status.snoozed.title",
|
||||
status: EInboxIssueStatus.SNOOZED,
|
||||
i18n_description: () => `inbox_issue.status.snoozed.description`,
|
||||
},
|
||||
{
|
||||
key: "accepted",
|
||||
i18n_title: "inbox_issue.status.accepted.title",
|
||||
status: EInboxIssueStatus.ACCEPTED,
|
||||
i18n_description: () => `inbox_issue.status.accepted.description`,
|
||||
},
|
||||
{
|
||||
key: "duplicate",
|
||||
i18n_title: "inbox_issue.status.duplicate.title",
|
||||
status: EInboxIssueStatus.DUPLICATE,
|
||||
i18n_description: () => `inbox_issue.status.duplicate.description`,
|
||||
},
|
||||
];
|
||||
|
||||
export const INBOX_ISSUE_ORDER_BY_OPTIONS = [
|
||||
{
|
||||
key: "issue__created_at",
|
||||
i18n_label: "inbox_issue.order_by.created_at",
|
||||
},
|
||||
{
|
||||
key: "issue__updated_at",
|
||||
i18n_label: "inbox_issue.order_by.updated_at",
|
||||
},
|
||||
{
|
||||
key: "issue__sequence_id",
|
||||
i18n_label: "inbox_issue.order_by.id",
|
||||
},
|
||||
{
|
||||
key: "issue__created_at",
|
||||
i18n_label: "inbox_issue.order_by.created_at",
|
||||
},
|
||||
{
|
||||
key: "issue__updated_at",
|
||||
i18n_label: "inbox_issue.order_by.updated_at",
|
||||
},
|
||||
{
|
||||
key: "issue__sequence_id",
|
||||
i18n_label: "inbox_issue.order_by.id",
|
||||
},
|
||||
];
|
||||
|
||||
export const INBOX_ISSUE_SORT_BY_OPTIONS = [
|
||||
{
|
||||
key: "asc",
|
||||
i18n_label: "common.sort.asc",
|
||||
},
|
||||
{
|
||||
key: "desc",
|
||||
i18n_label: "common.sort.desc",
|
||||
},
|
||||
{
|
||||
key: "asc",
|
||||
i18n_label: "common.sort.asc",
|
||||
},
|
||||
{
|
||||
key: "desc",
|
||||
i18n_label: "common.sort.desc",
|
||||
},
|
||||
];
|
||||
|
||||
@@ -14,6 +14,7 @@ export * from "./state";
|
||||
export * from "./swr";
|
||||
export * from "./tab-indices";
|
||||
export * from "./user";
|
||||
export * from "./payment";
|
||||
export * from "./workspace";
|
||||
export * from "./stickies";
|
||||
export * from "./cycle";
|
||||
@@ -29,3 +30,5 @@ export * from "./event-tracker";
|
||||
export * from "./spreadsheet";
|
||||
export * from "./dashboard";
|
||||
export * from "./page";
|
||||
export * from "./emoji";
|
||||
export * from "./subscription";
|
||||
|
||||
@@ -41,6 +41,7 @@ export enum EIssueGroupBYServerToProperty {
|
||||
export enum EIssueServiceType {
|
||||
ISSUES = "issues",
|
||||
EPICS = "epics",
|
||||
WORK_ITEMS = "work-items",
|
||||
}
|
||||
|
||||
export enum EIssuesStoreType {
|
||||
|
||||
163
packages/constants/src/payment.ts
Normal file
@@ -0,0 +1,163 @@
|
||||
import { IPaymentProduct, TBillingFrequency, TProductBillingFrequency } from "@plane/types";
|
||||
|
||||
/**
|
||||
* Enum representing different product subscription types
|
||||
*/
|
||||
export enum EProductSubscriptionEnum {
|
||||
FREE = "FREE",
|
||||
ONE = "ONE",
|
||||
PRO = "PRO",
|
||||
BUSINESS = "BUSINESS",
|
||||
ENTERPRISE = "ENTERPRISE",
|
||||
}
|
||||
|
||||
/**
|
||||
* Default billing frequency for each product subscription type
|
||||
*/
|
||||
export const DEFAULT_PRODUCT_BILLING_FREQUENCY: TProductBillingFrequency = {
|
||||
[EProductSubscriptionEnum.FREE]: undefined,
|
||||
[EProductSubscriptionEnum.ONE]: undefined,
|
||||
[EProductSubscriptionEnum.PRO]: "month",
|
||||
[EProductSubscriptionEnum.BUSINESS]: "month",
|
||||
[EProductSubscriptionEnum.ENTERPRISE]: "month",
|
||||
};
|
||||
|
||||
/**
|
||||
* Subscription types that support billing frequency toggle (monthly/yearly)
|
||||
*/
|
||||
export const SUBSCRIPTION_WITH_BILLING_FREQUENCY = [
|
||||
EProductSubscriptionEnum.PRO,
|
||||
EProductSubscriptionEnum.BUSINESS,
|
||||
EProductSubscriptionEnum.ENTERPRISE,
|
||||
];
|
||||
|
||||
/**
|
||||
* Mapping of product subscription types to their respective payment product details
|
||||
* Used to provide information about each product's pricing and features
|
||||
*/
|
||||
export const PLANE_COMMUNITY_PRODUCTS: Record<string, IPaymentProduct> = {
|
||||
[EProductSubscriptionEnum.PRO]: {
|
||||
id: EProductSubscriptionEnum.PRO,
|
||||
name: "Plane Pro",
|
||||
description:
|
||||
"More views, more cycles powers, more pages features, new reports, and better dashboards are waiting to be unlocked.",
|
||||
type: "PRO",
|
||||
prices: [
|
||||
{
|
||||
id: `price_monthly_${EProductSubscriptionEnum.PRO}`,
|
||||
unit_amount: 800,
|
||||
recurring: "month",
|
||||
currency: "usd",
|
||||
workspace_amount: 800,
|
||||
product: EProductSubscriptionEnum.PRO,
|
||||
},
|
||||
{
|
||||
id: `price_yearly_${EProductSubscriptionEnum.PRO}`,
|
||||
unit_amount: 7200,
|
||||
recurring: "year",
|
||||
currency: "usd",
|
||||
workspace_amount: 7200,
|
||||
product: EProductSubscriptionEnum.PRO,
|
||||
},
|
||||
],
|
||||
payment_quantity: 1,
|
||||
is_active: true,
|
||||
},
|
||||
[EProductSubscriptionEnum.BUSINESS]: {
|
||||
id: EProductSubscriptionEnum.BUSINESS,
|
||||
name: "Plane Business",
|
||||
description:
|
||||
"The earliest packaging of Business at $10 a seat a month billed annually, $12 a seat a month billed monthly for Plane Cloud",
|
||||
type: "BUSINESS",
|
||||
prices: [
|
||||
{
|
||||
id: `price_yearly_${EProductSubscriptionEnum.BUSINESS}`,
|
||||
unit_amount: 0,
|
||||
recurring: "year",
|
||||
currency: "usd",
|
||||
workspace_amount: 0,
|
||||
product: EProductSubscriptionEnum.BUSINESS,
|
||||
},
|
||||
{
|
||||
id: `price_monthly_${EProductSubscriptionEnum.BUSINESS}`,
|
||||
unit_amount: 0,
|
||||
recurring: "month",
|
||||
currency: "usd",
|
||||
workspace_amount: 0,
|
||||
product: EProductSubscriptionEnum.BUSINESS,
|
||||
},
|
||||
],
|
||||
payment_quantity: 1,
|
||||
is_active: false,
|
||||
},
|
||||
[EProductSubscriptionEnum.ENTERPRISE]: {
|
||||
id: EProductSubscriptionEnum.ENTERPRISE,
|
||||
name: "Plane Enterprise",
|
||||
description: "",
|
||||
type: "ENTERPRISE",
|
||||
prices: [
|
||||
{
|
||||
id: `price_yearly_${EProductSubscriptionEnum.ENTERPRISE}`,
|
||||
unit_amount: 0,
|
||||
recurring: "year",
|
||||
currency: "usd",
|
||||
workspace_amount: 0,
|
||||
product: EProductSubscriptionEnum.ENTERPRISE,
|
||||
},
|
||||
{
|
||||
id: `price_monthly_${EProductSubscriptionEnum.ENTERPRISE}`,
|
||||
unit_amount: 0,
|
||||
recurring: "month",
|
||||
currency: "usd",
|
||||
workspace_amount: 0,
|
||||
product: EProductSubscriptionEnum.ENTERPRISE,
|
||||
},
|
||||
],
|
||||
payment_quantity: 1,
|
||||
is_active: false,
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
* URL for the "Talk to Sales" page where users can contact sales team
|
||||
*/
|
||||
export const TALK_TO_SALES_URL = "https://plane.so/talk-to-sales";
|
||||
|
||||
/**
|
||||
* Mapping of subscription types to their respective upgrade/redirection URLs based on billing frequency
|
||||
* Used for self-hosted installations to redirect users to appropriate upgrade pages
|
||||
*/
|
||||
export const SUBSCRIPTION_REDIRECTION_URLS: Record<EProductSubscriptionEnum, Record<TBillingFrequency, string>> = {
|
||||
[EProductSubscriptionEnum.FREE]: {
|
||||
month: TALK_TO_SALES_URL,
|
||||
year: TALK_TO_SALES_URL,
|
||||
},
|
||||
[EProductSubscriptionEnum.ONE]: {
|
||||
month: TALK_TO_SALES_URL,
|
||||
year: TALK_TO_SALES_URL,
|
||||
},
|
||||
[EProductSubscriptionEnum.PRO]: {
|
||||
month: "https://app.plane.so/upgrade/pro/self-hosted?plan=month",
|
||||
year: "https://app.plane.so/upgrade/pro/self-hosted?plan=year",
|
||||
},
|
||||
[EProductSubscriptionEnum.BUSINESS]: {
|
||||
month: TALK_TO_SALES_URL,
|
||||
year: TALK_TO_SALES_URL,
|
||||
},
|
||||
[EProductSubscriptionEnum.ENTERPRISE]: {
|
||||
month: TALK_TO_SALES_URL,
|
||||
year: TALK_TO_SALES_URL,
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
* Mapping of subscription types to their respective marketing webpage URLs
|
||||
* Used to direct users to learn more about each plan's features and pricing
|
||||
*/
|
||||
export const SUBSCRIPTION_WEBPAGE_URLS: Record<EProductSubscriptionEnum, string> = {
|
||||
[EProductSubscriptionEnum.FREE]: TALK_TO_SALES_URL,
|
||||
[EProductSubscriptionEnum.ONE]: TALK_TO_SALES_URL,
|
||||
[EProductSubscriptionEnum.PRO]: "https://plane.so/pro",
|
||||
[EProductSubscriptionEnum.BUSINESS]: "https://plane.so/business",
|
||||
[EProductSubscriptionEnum.ENTERPRISE]: "https://plane.so/business",
|
||||
};
|
||||
@@ -1,5 +1,7 @@
|
||||
// icons
|
||||
import { TProjectAppliedDisplayFilterKeys, TProjectOrderByOptions } from "@plane/types";
|
||||
// plane imports
|
||||
import { IProject, TProjectAppliedDisplayFilterKeys, TProjectOrderByOptions } from "@plane/types";
|
||||
// local imports
|
||||
import { RANDOM_EMOJI_CODES } from "./emoji";
|
||||
|
||||
export type TNetworkChoiceIconKey = "Lock" | "Globe2";
|
||||
|
||||
@@ -132,3 +134,18 @@ export const PROJECT_ERROR_MESSAGES = {
|
||||
i18n_message: "workspace_projects.error.issue_delete",
|
||||
},
|
||||
};
|
||||
|
||||
export const DEFAULT_PROJECT_FORM_VALUES: Partial<IProject> = {
|
||||
cover_image_url: PROJECT_UNSPLASH_COVERS[Math.floor(Math.random() * PROJECT_UNSPLASH_COVERS.length)],
|
||||
description: "",
|
||||
logo_props: {
|
||||
in_use: "emoji",
|
||||
emoji: {
|
||||
value: RANDOM_EMOJI_CODES[Math.floor(Math.random() * RANDOM_EMOJI_CODES.length)],
|
||||
},
|
||||
},
|
||||
identifier: "",
|
||||
name: "",
|
||||
network: 2,
|
||||
project_lead: null,
|
||||
};
|
||||
|
||||
@@ -1,9 +1,4 @@
|
||||
export type TStateGroups =
|
||||
| "backlog"
|
||||
| "unstarted"
|
||||
| "started"
|
||||
| "completed"
|
||||
| "cancelled";
|
||||
export type TStateGroups = "backlog" | "unstarted" | "started" | "completed" | "cancelled";
|
||||
|
||||
export type TDraggableData = {
|
||||
groupKey: TStateGroups;
|
||||
@@ -14,40 +9,43 @@ export const STATE_GROUPS: {
|
||||
[key in TStateGroups]: {
|
||||
key: TStateGroups;
|
||||
label: string;
|
||||
defaultStateName: string;
|
||||
color: string;
|
||||
};
|
||||
} = {
|
||||
backlog: {
|
||||
key: "backlog",
|
||||
label: "Backlog",
|
||||
defaultStateName: "Backlog",
|
||||
color: "#d9d9d9",
|
||||
},
|
||||
unstarted: {
|
||||
key: "unstarted",
|
||||
label: "Unstarted",
|
||||
defaultStateName: "Todo",
|
||||
color: "#3f76ff",
|
||||
},
|
||||
started: {
|
||||
key: "started",
|
||||
label: "Started",
|
||||
defaultStateName: "In Progress",
|
||||
color: "#f59e0b",
|
||||
},
|
||||
completed: {
|
||||
key: "completed",
|
||||
label: "Completed",
|
||||
defaultStateName: "Done",
|
||||
color: "#16a34a",
|
||||
},
|
||||
cancelled: {
|
||||
key: "cancelled",
|
||||
label: "Canceled",
|
||||
defaultStateName: "Cancelled",
|
||||
color: "#dc2626",
|
||||
},
|
||||
};
|
||||
|
||||
export const ARCHIVABLE_STATE_GROUPS = [
|
||||
STATE_GROUPS.completed.key,
|
||||
STATE_GROUPS.cancelled.key,
|
||||
];
|
||||
export const ARCHIVABLE_STATE_GROUPS = [STATE_GROUPS.completed.key, STATE_GROUPS.cancelled.key];
|
||||
export const COMPLETED_STATE_GROUPS = [STATE_GROUPS.completed.key];
|
||||
export const PENDING_STATE_GROUPS = [
|
||||
STATE_GROUPS.backlog.key,
|
||||
|
||||
42
packages/constants/src/subscription.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
export const ENTERPRISE_PLAN_FEATURES = [
|
||||
"Private + managed deployments",
|
||||
"GAC",
|
||||
"LDAP support",
|
||||
"Databases + Formulas",
|
||||
"Unlimited and full Automation Flows",
|
||||
"Full-suite professional services",
|
||||
];
|
||||
|
||||
export const BUSINESS_PLAN_FEATURES = [
|
||||
"Project Templates",
|
||||
"Workflows + Approvals",
|
||||
"Decision + Loops Automation",
|
||||
"Custom Reports",
|
||||
"Nested Pages",
|
||||
"Intake Forms",
|
||||
];
|
||||
|
||||
export const PRO_PLAN_FEATURES = [
|
||||
"Dashboards + Reports",
|
||||
"Full Time Tracking + Bulk Ops",
|
||||
"Teamspaces",
|
||||
"Trigger And Action",
|
||||
"Wikis",
|
||||
"Popular integrations",
|
||||
];
|
||||
|
||||
export const ONE_PLAN_FEATURES = [
|
||||
"OIDC + SAML for SSO",
|
||||
"Active Cycles",
|
||||
"Real-time collab + public views and page",
|
||||
"Link pages in issues and vice-versa",
|
||||
"Time-tracking + limited bulk ops",
|
||||
"Docker, Kubernetes and more",
|
||||
];
|
||||
|
||||
export const FREE_PLAN_UPGRADE_FEATURES = [
|
||||
"OIDC + SAML for SSO",
|
||||
"Time Tracking and Bulk Ops",
|
||||
"Integrations",
|
||||
"Public Views and Pages",
|
||||
];
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@plane/editor",
|
||||
"version": "0.25.3",
|
||||
"version": "0.26.0",
|
||||
"description": "Core Editor that powers Plane",
|
||||
"license": "AGPL-3.0",
|
||||
"private": true,
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.