Mirror of https://github.com/makeplane/plane, synced 2025-08-07 19:59:33 +00:00

[WEB-2388] fix: workspace draft issues migration (#5749)

* fix: workspace draft issues
* chore: changed the timezone key
* chore: migration changes

Committed by GitHub
parent 7317975b04
commit d168fd4bfa
@@ -207,7 +207,7 @@ class CycleAPIEndpoint(BaseAPIView):
        # Incomplete Cycles
        if cycle_view == "incomplete":
            queryset = queryset.filter(
-               Q(end_date__gte=timezone.now().date())
+               Q(end_date__gte=timezone.now())
                | Q(end_date__isnull=True),
            )
            return self.paginate(
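Note (not part of the diff): the `.date()` calls are dropped throughout this commit because `Cycle.start_date` / `Cycle.end_date` become `DateTimeField`s later in the same change, so they must be compared against a timezone-aware datetime rather than a plain date. A minimal sketch of the difference, assuming Django's `USE_TZ = True`:

# Sketch only; not part of the commit. Assumes settings.USE_TZ = True.
from django.utils import timezone

now = timezone.now()    # timezone-aware datetime, e.g. 2024-09-30 14:05:00+00:00
today = now.date()      # plain date, e.g. 2024-09-30

# A DateField column is filtered against a date:
#     Cycle.objects.filter(end_date__gte=today)
# A DateTimeField column is filtered against an aware datetime:
#     Cycle.objects.filter(end_date__gte=now)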
@@ -311,7 +311,7 @@ class CycleAPIEndpoint(BaseAPIView):

        if (
            cycle.end_date is not None
-           and cycle.end_date < timezone.now().date()
+           and cycle.end_date < timezone.now()
        ):
            if "sort_order" in request_data:
                # Can only change sort order
@@ -537,7 +537,7 @@ class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):
        cycle = Cycle.objects.get(
            pk=cycle_id, project_id=project_id, workspace__slug=slug
        )
-       if cycle.end_date >= timezone.now().date():
+       if cycle.end_date >= timezone.now():
            return Response(
                {"error": "Only completed cycles can be archived"},
                status=status.HTTP_400_BAD_REQUEST,
@@ -1146,7 +1146,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):

        if (
            new_cycle.end_date is not None
-           and new_cycle.end_date < timezone.now().date()
+           and new_cycle.end_date < timezone.now()
        ):
            return Response(
                {
@@ -124,3 +124,9 @@ from .webhook import WebhookSerializer, WebhookLogSerializer
from .dashboard import DashboardSerializer, WidgetSerializer

from .favorite import UserFavoriteSerializer
+
+from .draft import (
+    DraftIssueCreateSerializer,
+    DraftIssueSerializer,
+    DraftIssueDetailSerializer,
+)
apiserver/plane/app/serializers/draft.py (new file, 278 lines)
@@ -0,0 +1,278 @@
# Django imports
from django.utils import timezone

# Third Party imports
from rest_framework import serializers

# Module imports
from .base import BaseSerializer
from plane.db.models import (
    User,
    Issue,
    Label,
    State,
    DraftIssue,
    DraftIssueAssignee,
    DraftIssueLabel,
    DraftIssueCycle,
    DraftIssueModule,
)


class DraftIssueCreateSerializer(BaseSerializer):
    # ids
    state_id = serializers.PrimaryKeyRelatedField(
        source="state",
        queryset=State.objects.all(),
        required=False,
        allow_null=True,
    )
    parent_id = serializers.PrimaryKeyRelatedField(
        source="parent",
        queryset=Issue.objects.all(),
        required=False,
        allow_null=True,
    )
    label_ids = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
        write_only=True,
        required=False,
    )
    assignee_ids = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
        write_only=True,
        required=False,
    )

    class Meta:
        model = DraftIssue
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]

    def to_representation(self, instance):
        data = super().to_representation(instance)
        assignee_ids = self.initial_data.get("assignee_ids")
        data["assignee_ids"] = assignee_ids if assignee_ids else []
        label_ids = self.initial_data.get("label_ids")
        data["label_ids"] = label_ids if label_ids else []
        return data

    def validate(self, data):
        if (
            data.get("start_date", None) is not None
            and data.get("target_date", None) is not None
            and data.get("start_date", None) > data.get("target_date", None)
        ):
            raise serializers.ValidationError(
                "Start date cannot exceed target date"
            )
        return data

    def create(self, validated_data):
        assignees = validated_data.pop("assignee_ids", None)
        labels = validated_data.pop("label_ids", None)
        modules = validated_data.pop("module_ids", None)
        cycle_id = self.initial_data.get("cycle_id", None)
        modules = self.initial_data.get("module_ids", None)

        workspace_id = self.context["workspace_id"]

        # Create Issue
        issue = DraftIssue.objects.create(
            **validated_data,
            workspace_id=workspace_id,
        )

        # Issue Audit Users
        created_by_id = issue.created_by_id
        updated_by_id = issue.updated_by_id

        if assignees is not None and len(assignees):
            DraftIssueAssignee.objects.bulk_create(
                [
                    DraftIssueAssignee(
                        assignee=user,
                        issue=issue,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for user in assignees
                ],
                batch_size=10,
            )

        if labels is not None and len(labels):
            DraftIssueLabel.objects.bulk_create(
                [
                    DraftIssueLabel(
                        label=label,
                        issue=issue,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for label in labels
                ],
                batch_size=10,
            )

        if cycle_id is not None:
            DraftIssueCycle.objects.create(
                cycle_id=cycle_id,
                draft_issue=issue,
                workspace_id=workspace_id,
                created_by_id=created_by_id,
                updated_by_id=updated_by_id,
            )

        if modules is not None and len(modules):
            DraftIssueModule.objects.bulk_create(
                [
                    DraftIssueModule(
                        module_id=module_id,
                        draft_issue=issue,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for module_id in modules
                ],
                batch_size=10,
            )

        return issue

    def update(self, instance, validated_data):
        assignees = validated_data.pop("assignee_ids", None)
        labels = validated_data.pop("label_ids", None)
        cycle_id = self.initial_data.get("cycle_id", None)
        modules = self.initial_data.get("module_ids", None)

        # Related models
        workspace_id = instance.workspace_id
        created_by_id = instance.created_by_id
        updated_by_id = instance.updated_by_id

        if assignees is not None:
            DraftIssueAssignee.objects.filter(issue=instance).delete()
            DraftIssueAssignee.objects.bulk_create(
                [
                    DraftIssueAssignee(
                        assignee=user,
                        issue=instance,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for user in assignees
                ],
                batch_size=10,
            )

        if labels is not None:
            DraftIssueLabel.objects.filter(issue=instance).delete()
            DraftIssueLabel.objects.bulk_create(
                [
                    DraftIssueLabel(
                        label=label,
                        issue=instance,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for label in labels
                ],
                batch_size=10,
            )

        if cycle_id is not None:
            DraftIssueCycle.objects.filter(draft_issue=instance).delete()
            DraftIssueCycle.objects.create(
                cycle_id=cycle_id,
                draft_issue=instance,
                workspace_id=workspace_id,
                created_by_id=created_by_id,
                updated_by_id=updated_by_id,
            )

        if modules is not None:
            DraftIssueModule.objects.filter(draft_issue=instance).delete()
            DraftIssueModule.objects.bulk_create(
                [
                    DraftIssueModule(
                        module=module,
                        draft_issue=instance,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for module in modules
                ],
                batch_size=10,
            )

        # Time updation occurs even when other related models are updated
        instance.updated_at = timezone.now()
        return super().update(instance, validated_data)


class DraftIssueSerializer(BaseSerializer):
    # ids
    cycle_id = serializers.PrimaryKeyRelatedField(read_only=True)
    module_ids = serializers.ListField(
        child=serializers.UUIDField(),
        required=False,
    )

    # Many to many
    label_ids = serializers.ListField(
        child=serializers.UUIDField(),
        required=False,
    )
    assignee_ids = serializers.ListField(
        child=serializers.UUIDField(),
        required=False,
    )

    class Meta:
        model = DraftIssue
        fields = [
            "id",
            "name",
            "state_id",
            "sort_order",
            "completed_at",
            "estimate_point",
            "priority",
            "start_date",
            "target_date",
            "project_id",
            "parent_id",
            "cycle_id",
            "module_ids",
            "label_ids",
            "assignee_ids",
            "created_at",
            "updated_at",
            "created_by",
            "updated_by",
        ]
        read_only_fields = fields


class DraftIssueDetailSerializer(DraftIssueSerializer):
    description_html = serializers.CharField()

    class Meta(DraftIssueSerializer.Meta):
        fields = DraftIssueSerializer.Meta.fields + [
            "description_html",
        ]
        read_only_fields = fields
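Note (not part of the diff): a hedged usage sketch of the new DraftIssueCreateSerializer. The view later in this commit passes `workspace_id` through the serializer context, and `cycle_id` / `module_ids` are read from `initial_data` rather than from validated fields, so they are supplied as plain payload keys. The payload values and the `workspace` object below are hypothetical.

# Sketch only; payload values are hypothetical.
serializer = DraftIssueCreateSerializer(
    data={
        "name": "Draft: refactor cycle dates",
        "priority": "medium",
        "cycle_id": None,     # read from initial_data inside create()
        "module_ids": [],     # read from initial_data inside create()
    },
    context={"workspace_id": workspace.id},  # create() requires this context key
)
if serializer.is_valid():
    draft_issue = serializer.save()  # creates the DraftIssue plus related rows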
@@ -11,7 +11,6 @@ from plane.app.views import (
    IssueActivityEndpoint,
    IssueArchiveViewSet,
    IssueCommentViewSet,
-   IssueDraftViewSet,
    IssueListEndpoint,
    IssueReactionViewSet,
    IssueRelationViewSet,
@@ -290,28 +289,6 @@ urlpatterns = [
        name="issue-relation",
    ),
    ## End Issue Relation
-   ## Issue Drafts
-   path(
-       "workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/",
-       IssueDraftViewSet.as_view(
-           {
-               "get": "list",
-               "post": "create",
-           }
-       ),
-       name="project-issue-draft",
-   ),
-   path(
-       "workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/<uuid:pk>/",
-       IssueDraftViewSet.as_view(
-           {
-               "get": "retrieve",
-               "patch": "partial_update",
-               "delete": "destroy",
-           }
-       ),
-       name="project-issue-draft",
-   ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/deleted-issues/",
        DeletedIssuesListViewSet.as_view(),
@@ -27,6 +27,7 @@ from plane.app.views import (
    WorkspaceCyclesEndpoint,
    WorkspaceFavoriteEndpoint,
    WorkspaceFavoriteGroupEndpoint,
+   WorkspaceDraftIssueViewSet,
)
@@ -254,4 +255,25 @@ urlpatterns = [
        WorkspaceFavoriteGroupEndpoint.as_view(),
        name="workspace-user-favorites-groups",
    ),
+   path(
+       "workspaces/<str:slug>/draft-issues/",
+       WorkspaceDraftIssueViewSet.as_view(
+           {
+               "get": "list",
+               "post": "create",
+           }
+       ),
+       name="workspace-draft-issues",
+   ),
+   path(
+       "workspaces/<str:slug>/draft-issues/<uuid:pk>/",
+       WorkspaceDraftIssueViewSet.as_view(
+           {
+               "get": "retrieve",
+               "patch": "partial_update",
+               "delete": "destroy",
+           }
+       ),
+       name="workspace-drafts-issues",
+   ),
]
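Note (not part of the diff): a hedged sketch of exercising the new workspace-level routes with Django's test client. The URL names come from the patterns above; the user fixture and workspace slug are hypothetical, and authentication/permission setup is assumed to be in place.

# Sketch only; assumes an authenticated user and an existing workspace slug.
from django.urls import reverse
from rest_framework.test import APIClient

client = APIClient()
client.force_authenticate(user=some_user)  # hypothetical user fixture

list_url = reverse("workspace-draft-issues", kwargs={"slug": "my-workspace"})
resp = client.post(list_url, {"name": "My draft"}, format="json")  # "post": "create"
assert resp.status_code == 201

detail_url = reverse(
    "workspace-drafts-issues",
    kwargs={"slug": "my-workspace", "pk": resp.data["id"]},
)
resp = client.patch(detail_url, {"priority": "high"}, format="json")  # "patch": "partial_update"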
@@ -40,6 +40,8 @@ from .workspace.base import (
    ExportWorkspaceUserActivityEndpoint,
)

+from .workspace.draft import WorkspaceDraftIssueViewSet
+
from .workspace.favorite import (
    WorkspaceFavoriteEndpoint,
    WorkspaceFavoriteGroupEndpoint,
@@ -133,8 +135,6 @@ from .issue.comment import (
    CommentReactionViewSet,
)

-from .issue.draft import IssueDraftViewSet
-
from .issue.label import (
    LabelViewSet,
    BulkCreateIssueLabelsEndpoint,
@@ -604,7 +604,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
            pk=cycle_id, project_id=project_id, workspace__slug=slug
        )

-       if cycle.end_date >= timezone.now().date():
+       if cycle.end_date >= timezone.now():
            return Response(
                {"error": "Only completed cycles can be archived"},
                status=status.HTTP_400_BAD_REQUEST,
@@ -308,7 +308,7 @@ class CycleViewSet(BaseViewSet):

        if (
            cycle.end_date is not None
-           and cycle.end_date < timezone.now().date()
+           and cycle.end_date < timezone.now()
        ):
            if "sort_order" in request_data:
                # Can only change sort order for a completed cycle
@@ -925,7 +925,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):

        if (
            new_cycle.end_date is not None
-           and new_cycle.end_date < timezone.now().date()
+           and new_cycle.end_date < timezone.now()
        ):
            return Response(
                {
@@ -248,7 +248,7 @@ class CycleIssueViewSet(BaseViewSet):

        if (
            cycle.end_date is not None
-           and cycle.end_date < timezone.now().date()
+           and cycle.end_date < timezone.now()
        ):
            return Response(
                {
@@ -40,8 +40,6 @@ from plane.db.models import (
    IssueLink,
    IssueRelation,
    Project,
    ProjectMember,
    User,
    Widget,
    WorkspaceMember,
)
@@ -1,410 +0,0 @@
|
||||
# Python imports
|
||||
import json
|
||||
|
||||
# Django imports
|
||||
from django.contrib.postgres.aggregates import ArrayAgg
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
from django.db.models import (
|
||||
Exists,
|
||||
F,
|
||||
Func,
|
||||
OuterRef,
|
||||
Prefetch,
|
||||
Q,
|
||||
UUIDField,
|
||||
Value,
|
||||
)
|
||||
from django.db.models.functions import Coalesce
|
||||
from django.utils import timezone
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views.decorators.gzip import gzip_page
|
||||
|
||||
# Third Party imports
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
|
||||
# Module imports
|
||||
from plane.app.permissions import ProjectEntityPermission
|
||||
from plane.app.serializers import (
|
||||
IssueCreateSerializer,
|
||||
IssueDetailSerializer,
|
||||
IssueFlatSerializer,
|
||||
IssueSerializer,
|
||||
)
|
||||
from plane.bgtasks.issue_activities_task import issue_activity
|
||||
from plane.db.models import (
|
||||
Issue,
|
||||
IssueAttachment,
|
||||
IssueLink,
|
||||
IssueReaction,
|
||||
IssueSubscriber,
|
||||
Project,
|
||||
ProjectMember,
|
||||
)
|
||||
from plane.utils.grouper import (
|
||||
issue_group_values,
|
||||
issue_on_results,
|
||||
issue_queryset_grouper,
|
||||
)
|
||||
from plane.utils.issue_filters import issue_filters
|
||||
from plane.utils.order_queryset import order_issue_queryset
|
||||
from plane.utils.paginator import (
|
||||
GroupedOffsetPaginator,
|
||||
SubGroupedOffsetPaginator,
|
||||
)
|
||||
from .. import BaseViewSet
|
||||
|
||||
|
||||
class IssueDraftViewSet(BaseViewSet):
|
||||
permission_classes = [
|
||||
ProjectEntityPermission,
|
||||
]
|
||||
serializer_class = IssueFlatSerializer
|
||||
model = Issue
|
||||
|
||||
def get_queryset(self):
|
||||
return (
|
||||
Issue.objects.filter(project_id=self.kwargs.get("project_id"))
|
||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
||||
.filter(is_draft=True)
|
||||
.filter(deleted_at__isnull=True)
|
||||
.select_related("workspace", "project", "state", "parent")
|
||||
.prefetch_related("assignees", "labels", "issue_module__module")
|
||||
.annotate(cycle_id=F("issue_cycle__cycle_id"))
|
||||
.annotate(
|
||||
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
attachment_count=IssueAttachment.objects.filter(
|
||||
issue=OuterRef("id")
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
sub_issues_count=Issue.issue_objects.filter(
|
||||
parent=OuterRef("id")
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
).distinct()
|
||||
|
||||
@method_decorator(gzip_page)
|
||||
def list(self, request, slug, project_id):
|
||||
filters = issue_filters(request.query_params, "GET")
|
||||
|
||||
order_by_param = request.GET.get("order_by", "-created_at")
|
||||
|
||||
issue_queryset = self.get_queryset().filter(**filters)
|
||||
# Issue queryset
|
||||
issue_queryset, order_by_param = order_issue_queryset(
|
||||
issue_queryset=issue_queryset,
|
||||
order_by_param=order_by_param,
|
||||
)
|
||||
|
||||
# Group by
|
||||
group_by = request.GET.get("group_by", False)
|
||||
sub_group_by = request.GET.get("sub_group_by", False)
|
||||
|
||||
# issue queryset
|
||||
issue_queryset = issue_queryset_grouper(
|
||||
queryset=issue_queryset,
|
||||
group_by=group_by,
|
||||
sub_group_by=sub_group_by,
|
||||
)
|
||||
|
||||
if group_by:
|
||||
# Check group and sub group value paginate
|
||||
if sub_group_by:
|
||||
if group_by == sub_group_by:
|
||||
return Response(
|
||||
{
|
||||
"error": "Group by and sub group by cannot have same parameters"
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
else:
|
||||
# group and sub group pagination
|
||||
return self.paginate(
|
||||
request=request,
|
||||
order_by=order_by_param,
|
||||
queryset=issue_queryset,
|
||||
on_results=lambda issues: issue_on_results(
|
||||
group_by=group_by,
|
||||
issues=issues,
|
||||
sub_group_by=sub_group_by,
|
||||
),
|
||||
paginator_cls=SubGroupedOffsetPaginator,
|
||||
group_by_fields=issue_group_values(
|
||||
field=group_by,
|
||||
slug=slug,
|
||||
project_id=project_id,
|
||||
filters=filters,
|
||||
),
|
||||
sub_group_by_fields=issue_group_values(
|
||||
field=sub_group_by,
|
||||
slug=slug,
|
||||
project_id=project_id,
|
||||
filters=filters,
|
||||
),
|
||||
group_by_field_name=group_by,
|
||||
sub_group_by_field_name=sub_group_by,
|
||||
count_filter=Q(
|
||||
Q(issue_inbox__status=1)
|
||||
| Q(issue_inbox__status=-1)
|
||||
| Q(issue_inbox__status=2)
|
||||
| Q(issue_inbox__isnull=True),
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
)
|
||||
# Group Paginate
|
||||
else:
|
||||
# Group paginate
|
||||
return self.paginate(
|
||||
request=request,
|
||||
order_by=order_by_param,
|
||||
queryset=issue_queryset,
|
||||
on_results=lambda issues: issue_on_results(
|
||||
group_by=group_by,
|
||||
issues=issues,
|
||||
sub_group_by=sub_group_by,
|
||||
),
|
||||
paginator_cls=GroupedOffsetPaginator,
|
||||
group_by_fields=issue_group_values(
|
||||
field=group_by,
|
||||
slug=slug,
|
||||
project_id=project_id,
|
||||
filters=filters,
|
||||
),
|
||||
group_by_field_name=group_by,
|
||||
count_filter=Q(
|
||||
Q(issue_inbox__status=1)
|
||||
| Q(issue_inbox__status=-1)
|
||||
| Q(issue_inbox__status=2)
|
||||
| Q(issue_inbox__isnull=True),
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
)
|
||||
else:
|
||||
# List Paginate
|
||||
return self.paginate(
|
||||
order_by=order_by_param,
|
||||
request=request,
|
||||
queryset=issue_queryset,
|
||||
on_results=lambda issues: issue_on_results(
|
||||
group_by=group_by, issues=issues, sub_group_by=sub_group_by
|
||||
),
|
||||
)
|
||||
|
||||
def create(self, request, slug, project_id):
|
||||
project = Project.objects.get(pk=project_id)
|
||||
|
||||
serializer = IssueCreateSerializer(
|
||||
data=request.data,
|
||||
context={
|
||||
"project_id": project_id,
|
||||
"workspace_id": project.workspace_id,
|
||||
"default_assignee_id": project.default_assignee_id,
|
||||
},
|
||||
)
|
||||
|
||||
if serializer.is_valid():
|
||||
serializer.save(is_draft=True)
|
||||
|
||||
# Track the issue
|
||||
issue_activity.delay(
|
||||
type="issue_draft.activity.created",
|
||||
requested_data=json.dumps(
|
||||
self.request.data, cls=DjangoJSONEncoder
|
||||
),
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=str(serializer.data.get("id", None)),
|
||||
project_id=str(project_id),
|
||||
current_instance=None,
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
)
|
||||
|
||||
issue = (
|
||||
issue_queryset_grouper(
|
||||
queryset=self.get_queryset().filter(
|
||||
pk=serializer.data["id"]
|
||||
),
|
||||
group_by=None,
|
||||
sub_group_by=None,
|
||||
)
|
||||
.values(
|
||||
"id",
|
||||
"name",
|
||||
"state_id",
|
||||
"sort_order",
|
||||
"completed_at",
|
||||
"estimate_point",
|
||||
"priority",
|
||||
"start_date",
|
||||
"target_date",
|
||||
"sequence_id",
|
||||
"project_id",
|
||||
"parent_id",
|
||||
"cycle_id",
|
||||
"module_ids",
|
||||
"label_ids",
|
||||
"assignee_ids",
|
||||
"sub_issues_count",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"attachment_count",
|
||||
"link_count",
|
||||
"is_draft",
|
||||
"archived_at",
|
||||
)
|
||||
.first()
|
||||
)
|
||||
return Response(issue, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def partial_update(self, request, slug, project_id, pk):
|
||||
issue = self.get_queryset().filter(pk=pk).first()
|
||||
|
||||
if not issue:
|
||||
return Response(
|
||||
{"error": "Issue does not exist"},
|
||||
status=status.HTTP_404_NOT_FOUND,
|
||||
)
|
||||
|
||||
serializer = IssueCreateSerializer(
|
||||
issue, data=request.data, partial=True
|
||||
)
|
||||
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
issue_activity.delay(
|
||||
type="issue_draft.activity.updated",
|
||||
requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
|
||||
actor_id=str(self.request.user.id),
|
||||
issue_id=str(self.kwargs.get("pk", None)),
|
||||
project_id=str(self.kwargs.get("project_id", None)),
|
||||
current_instance=json.dumps(
|
||||
IssueSerializer(issue).data,
|
||||
cls=DjangoJSONEncoder,
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
)
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def retrieve(self, request, slug, project_id, pk=None):
|
||||
issue = (
|
||||
self.get_queryset()
|
||||
.filter(pk=pk)
|
||||
.annotate(
|
||||
label_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"labels__id",
|
||||
distinct=True,
|
||||
filter=~Q(labels__id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
assignee_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"assignees__id",
|
||||
distinct=True,
|
||||
filter=~Q(assignees__id__isnull=True)
|
||||
& Q(assignees__member_project__is_active=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
module_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"issue_module__module_id",
|
||||
distinct=True,
|
||||
filter=~Q(issue_module__module_id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
)
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"issue_reactions",
|
||||
queryset=IssueReaction.objects.select_related(
|
||||
"issue", "actor"
|
||||
),
|
||||
)
|
||||
)
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"issue_attachment",
|
||||
queryset=IssueAttachment.objects.select_related("issue"),
|
||||
)
|
||||
)
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"issue_link",
|
||||
queryset=IssueLink.objects.select_related("created_by"),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
is_subscribed=Exists(
|
||||
IssueSubscriber.objects.filter(
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
issue_id=OuterRef("pk"),
|
||||
subscriber=request.user,
|
||||
)
|
||||
)
|
||||
)
|
||||
).first()
|
||||
|
||||
if not issue:
|
||||
return Response(
|
||||
{"error": "The required object does not exist."},
|
||||
status=status.HTTP_404_NOT_FOUND,
|
||||
)
|
||||
serializer = IssueDetailSerializer(issue, expand=self.expand)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
def destroy(self, request, slug, project_id, pk=None):
|
||||
issue = Issue.objects.get(
|
||||
workspace__slug=slug, project_id=project_id, pk=pk
|
||||
)
|
||||
if issue.created_by_id != request.user.id and (
|
||||
not ProjectMember.objects.filter(
|
||||
workspace__slug=slug,
|
||||
member=request.user,
|
||||
role=20,
|
||||
project_id=project_id,
|
||||
is_active=True,
|
||||
).exists()
|
||||
):
|
||||
return Response(
|
||||
{"error": "Only admin or creator can delete the issue"},
|
||||
status=status.HTTP_403_FORBIDDEN,
|
||||
)
|
||||
issue.delete()
|
||||
issue_activity.delay(
|
||||
type="issue_draft.activity.deleted",
|
||||
requested_data=json.dumps({"issue_id": str(pk)}),
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=str(pk),
|
||||
project_id=str(project_id),
|
||||
current_instance={},
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
)
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
apiserver/plane/app/views/workspace/draft.py (new file, 236 lines)
@@ -0,0 +1,236 @@
# Django imports
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models import (
    F,
    Q,
    UUIDField,
    Value,
)
from django.db.models.functions import Coalesce
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page

# Third Party imports
from rest_framework import status
from rest_framework.response import Response

# Module imports
from plane.app.permissions import allow_permission, ROLE
from plane.app.serializers import (
    IssueCreateSerializer,
    DraftIssueCreateSerializer,
    DraftIssueSerializer,
    DraftIssueDetailSerializer,
)
from plane.db.models import (
    Issue,
    DraftIssue,
    Workspace,
)
from .. import BaseViewSet


class WorkspaceDraftIssueViewSet(BaseViewSet):

    model = DraftIssue

    @method_decorator(gzip_page)
    @allow_permission(
        allowed_roles=[ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST], level="WORKSPACE"
    )
    def list(self, request, slug):
        issues = (
            DraftIssue.objects.filter(workspace__slug=slug)
            .filter(created_by=request.user)
            .select_related("workspace", "project", "state", "parent")
            .prefetch_related(
                "assignees", "labels", "draft_issue_module__module"
            )
            .annotate(cycle_id=F("draft_issue_cycle__cycle_id"))
            .annotate(
                label_ids=Coalesce(
                    ArrayAgg(
                        "labels__id",
                        distinct=True,
                        filter=~Q(labels__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                assignee_ids=Coalesce(
                    ArrayAgg(
                        "assignees__id",
                        distinct=True,
                        filter=~Q(assignees__id__isnull=True)
                        & Q(assignees__member_project__is_active=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                module_ids=Coalesce(
                    ArrayAgg(
                        "draft_issue_module__module_id",
                        distinct=True,
                        filter=~Q(draft_issue_module__module_id__isnull=True)
                        & Q(
                            draft_issue_module__module__archived_at__isnull=True
                        ),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            )
            .order_by("-created_at")
        )

        serializer = DraftIssueSerializer(issues, many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)

    @allow_permission(
        allowed_roles=[ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST], level="WORKSPACE"
    )
    def create(self, request, slug):
        workspace = Workspace.objects.get(slug=slug)

        serializer = DraftIssueCreateSerializer(
            data=request.data,
            context={
                "workspace_id": workspace.id,
            },
        )
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    @allow_permission(
        allowed_roles=[ROLE.ADMIN, ROLE.MEMBER],
        creator=True,
        model=Issue,
        level="WORKSPACE",
    )
    def partial_update(self, request, slug, pk):
        issue = (
            DraftIssue.objects.filter(workspace__slug=slug)
            .filter(pk=pk)
            .filter(created_by=request.user)
            .select_related("workspace", "project", "state", "parent")
            .prefetch_related(
                "assignees", "labels", "draft_issue_module__module"
            )
            .annotate(cycle_id=F("draft_issue_cycle__cycle_id"))
            .annotate(
                label_ids=Coalesce(
                    ArrayAgg(
                        "labels__id",
                        distinct=True,
                        filter=~Q(labels__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                assignee_ids=Coalesce(
                    ArrayAgg(
                        "assignees__id",
                        distinct=True,
                        filter=~Q(assignees__id__isnull=True)
                        & Q(assignees__member_project__is_active=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                module_ids=Coalesce(
                    ArrayAgg(
                        "draft_issue_module__module_id",
                        distinct=True,
                        filter=~Q(draft_issue_module__module_id__isnull=True)
                        & Q(
                            draft_issue_module__module__archived_at__isnull=True
                        ),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            )
            .first()
        )

        if not issue:
            return Response(
                {"error": "Issue not found"},
                status=status.HTTP_404_NOT_FOUND,
            )

        serializer = IssueCreateSerializer(
            issue, data=request.data, partial=True
        )

        if serializer.is_valid():
            serializer.save()

            return Response(status=status.HTTP_204_NO_CONTENT)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    @allow_permission(
        allowed_roles=[ROLE.ADMIN],
        creator=True,
        model=Issue,
        level="WORKSPACE",
    )
    def retrieve(self, request, slug, pk=None):
        issue = (
            DraftIssue.objects.filter(workspace__slug=slug)
            .filter(pk=pk)
            .filter(created_by=request.user)
            .select_related("workspace", "project", "state", "parent")
            .prefetch_related(
                "assignees", "labels", "draft_issue_module__module"
            )
            .annotate(cycle_id=F("draft_issue_cycle__cycle_id"))
            .filter(pk=pk)
            .annotate(
                label_ids=Coalesce(
                    ArrayAgg(
                        "labels__id",
                        distinct=True,
                        filter=~Q(labels__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                assignee_ids=Coalesce(
                    ArrayAgg(
                        "assignees__id",
                        distinct=True,
                        filter=~Q(assignees__id__isnull=True)
                        & Q(assignees__member_project__is_active=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                module_ids=Coalesce(
                    ArrayAgg(
                        "draft_issue_module__module_id",
                        distinct=True,
                        filter=~Q(draft_issue_module__module_id__isnull=True)
                        & Q(
                            draft_issue_module__module__archived_at__isnull=True
                        ),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            )
        ).first()

        if not issue:
            return Response(
                {"error": "The required object does not exist."},
                status=status.HTTP_404_NOT_FOUND,
            )

        serializer = DraftIssueDetailSerializer(issue)
        return Response(serializer.data, status=status.HTTP_200_OK)

    @allow_permission(
        allowed_roles=[ROLE.ADMIN],
        creator=True,
        model=Issue,
        level="WORKSPACE",
    )
    def destroy(self, request, slug, pk=None):
        draft_issue = DraftIssue.objects.get(workspace__slug=slug, pk=pk)
        draft_issue.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
@@ -504,7 +504,7 @@ class WorkspaceUserProfileStatsEndpoint(BaseAPIView):

        upcoming_cycles = CycleIssue.objects.filter(
            workspace__slug=slug,
-           cycle__start_date__gt=timezone.now().date(),
+           cycle__start_date__gt=timezone.now(),
            issue__assignees__in=[
                user_id,
            ],
@@ -512,8 +512,8 @@ class WorkspaceUserProfileStatsEndpoint(BaseAPIView):

        present_cycle = CycleIssue.objects.filter(
            workspace__slug=slug,
-           cycle__start_date__lt=timezone.now().date(),
-           cycle__end_date__gt=timezone.now().date(),
+           cycle__start_date__lt=timezone.now(),
+           cycle__end_date__gt=timezone.now(),
            issue__assignees__in=[
                user_id,
            ],
@@ -42,14 +42,12 @@ def archive_old_issues():
            ),
            Q(issue_cycle__isnull=True)
            | (
-               Q(issue_cycle__cycle__end_date__lt=timezone.now().date())
+               Q(issue_cycle__cycle__end_date__lt=timezone.now())
                & Q(issue_cycle__isnull=False)
            ),
            Q(issue_module__isnull=True)
            | (
-               Q(
-                   issue_module__module__target_date__lt=timezone.now().date()
-               )
+               Q(issue_module__module__target_date__lt=timezone.now())
                & Q(issue_module__isnull=False)
            ),
        ).filter(
@@ -122,14 +120,12 @@ def close_old_issues():
            ),
            Q(issue_cycle__isnull=True)
            | (
-               Q(issue_cycle__cycle__end_date__lt=timezone.now().date())
+               Q(issue_cycle__cycle__end_date__lt=timezone.now())
                & Q(issue_cycle__isnull=False)
            ),
            Q(issue_module__isnull=True)
            | (
-               Q(
-                   issue_module__module__target_date__lt=timezone.now().date()
-               )
+               Q(issue_module__module__target_date__lt=timezone.now())
                & Q(issue_module__isnull=False)
            ),
        ).filter(
File diff suppressed because it is too large.
@@ -5,6 +5,7 @@ from .base import BaseModel
from .cycle import Cycle, CycleFavorite, CycleIssue, CycleUserProperties
from .dashboard import Dashboard, DashboardWidget, Widget
from .deploy_board import DeployBoard
+from .draft import DraftIssue, DraftIssueAssignee, DraftIssueLabel, DraftIssueModule, DraftIssueCycle
from .estimate import Estimate, EstimatePoint
from .exporter import ExporterHistory
from .importer import Importer
@@ -1,3 +1,6 @@
+# Python imports
+import pytz
+
# Django imports
from django.conf import settings
from django.db import models
@@ -55,10 +58,12 @@ class Cycle(ProjectBaseModel):
    description = models.TextField(
        verbose_name="Cycle Description", blank=True
    )
-   start_date = models.DateField(
+   start_date = models.DateTimeField(
        verbose_name="Start Date", blank=True, null=True
    )
-   end_date = models.DateField(verbose_name="End Date", blank=True, null=True)
+   end_date = models.DateTimeField(
+       verbose_name="End Date", blank=True, null=True
+   )
    owned_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
@@ -71,6 +76,12 @@ class Cycle(ProjectBaseModel):
    progress_snapshot = models.JSONField(default=dict)
    archived_at = models.DateTimeField(null=True)
    logo_props = models.JSONField(default=dict)
+   # timezone
+   TIMEZONE_CHOICES = tuple(zip(pytz.all_timezones, pytz.all_timezones))
+   timezone = models.CharField(
+       max_length=255, default="UTC", choices=TIMEZONE_CHOICES
+   )
+   version = models.IntegerField(default=1)

    class Meta:
        verbose_name = "Cycle"
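Note (not part of the diff): the commit message mentions migration changes, but the migration file itself is not shown here (one large file diff is suppressed above). Converting `start_date`/`end_date` to `DateTimeField` and adding the `timezone`/`version` columns would look roughly like the illustrative sketch below; the migration name and dependency are placeholders, not the real ones from the commit.

# Illustrative sketch only; the actual migration in the commit is not shown in this diff.
import pytz
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [("db", "XXXX_previous_migration")]  # placeholder dependency

    operations = [
        migrations.AlterField(
            model_name="cycle",
            name="start_date",
            field=models.DateTimeField(blank=True, null=True, verbose_name="Start Date"),
        ),
        migrations.AlterField(
            model_name="cycle",
            name="end_date",
            field=models.DateTimeField(blank=True, null=True, verbose_name="End Date"),
        ),
        migrations.AddField(
            model_name="cycle",
            name="timezone",
            field=models.CharField(
                choices=tuple(zip(pytz.all_timezones, pytz.all_timezones)),
                default="UTC",
                max_length=255,
            ),
        ),
        migrations.AddField(
            model_name="cycle",
            name="version",
            field=models.IntegerField(default=1),
        ),
    ]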
apiserver/plane/db/models/draft.py (new file, 253 lines)
@@ -0,0 +1,253 @@
# Django imports
from django.conf import settings
from django.db import models
from django.utils import timezone

# Module imports
from plane.utils.html_processor import strip_tags

from .workspace import WorkspaceBaseModel


class DraftIssue(WorkspaceBaseModel):
    PRIORITY_CHOICES = (
        ("urgent", "Urgent"),
        ("high", "High"),
        ("medium", "Medium"),
        ("low", "Low"),
        ("none", "None"),
    )
    parent = models.ForeignKey(
        "db.Issue",
        on_delete=models.CASCADE,
        null=True,
        blank=True,
        related_name="draft_parent_issue",
    )
    state = models.ForeignKey(
        "db.State",
        on_delete=models.CASCADE,
        null=True,
        blank=True,
        related_name="state_draft_issue",
    )
    estimate_point = models.ForeignKey(
        "db.EstimatePoint",
        on_delete=models.SET_NULL,
        related_name="draft_issue_estimates",
        null=True,
        blank=True,
    )
    name = models.CharField(
        max_length=255, verbose_name="Issue Name", blank=True, null=True
    )
    description = models.JSONField(blank=True, default=dict)
    description_html = models.TextField(blank=True, default="<p></p>")
    description_stripped = models.TextField(blank=True, null=True)
    description_binary = models.BinaryField(null=True)
    priority = models.CharField(
        max_length=30,
        choices=PRIORITY_CHOICES,
        verbose_name="Issue Priority",
        default="none",
    )
    start_date = models.DateField(null=True, blank=True)
    target_date = models.DateField(null=True, blank=True)
    assignees = models.ManyToManyField(
        settings.AUTH_USER_MODEL,
        blank=True,
        related_name="draft_assignee",
        through="DraftIssueAssignee",
        through_fields=("draft_issue", "assignee"),
    )
    labels = models.ManyToManyField(
        "db.Label",
        blank=True,
        related_name="draft_labels",
        through="DraftIssueLabel",
    )
    sort_order = models.FloatField(default=65535)
    completed_at = models.DateTimeField(null=True)
    external_source = models.CharField(max_length=255, null=True, blank=True)
    external_id = models.CharField(max_length=255, blank=True, null=True)
    type = models.ForeignKey(
        "db.IssueType",
        on_delete=models.SET_NULL,
        related_name="draft_issue_type",
        null=True,
        blank=True,
    )

    class Meta:
        verbose_name = "DraftIssue"
        verbose_name_plural = "DraftIssues"
        db_table = "draft_issues"
        ordering = ("-created_at",)

    def save(self, *args, **kwargs):
        if self.state is None:
            try:
                from plane.db.models import State

                default_state = State.objects.filter(
                    ~models.Q(is_triage=True),
                    project=self.project,
                    default=True,
                ).first()
                if default_state is None:
                    random_state = State.objects.filter(
                        ~models.Q(is_triage=True), project=self.project
                    ).first()
                    self.state = random_state
                else:
                    self.state = default_state
            except ImportError:
                pass
        else:
            try:
                from plane.db.models import State

                if self.state.group == "completed":
                    self.completed_at = timezone.now()
                else:
                    self.completed_at = None
            except ImportError:
                pass

        if self._state.adding:
            # Strip the html tags using html parser
            self.description_stripped = (
                None
                if (
                    self.description_html == ""
                    or self.description_html is None
                )
                else strip_tags(self.description_html)
            )
            largest_sort_order = DraftIssue.objects.filter(
                project=self.project, state=self.state
            ).aggregate(largest=models.Max("sort_order"))["largest"]
            if largest_sort_order is not None:
                self.sort_order = largest_sort_order + 10000

            super(DraftIssue, self).save(*args, **kwargs)

        else:
            # Strip the html tags using html parser
            self.description_stripped = (
                None
                if (
                    self.description_html == ""
                    or self.description_html is None
                )
                else strip_tags(self.description_html)
            )
            super(DraftIssue, self).save(*args, **kwargs)

    def __str__(self):
        """Return name of the draft issue"""
        return f"{self.name} <{self.project.name}>"


class DraftIssueAssignee(WorkspaceBaseModel):
    draft_issue = models.ForeignKey(
        DraftIssue,
        on_delete=models.CASCADE,
        related_name="draft_issue_assignee",
    )
    assignee = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name="draft_issue_assignee",
    )

    class Meta:
        unique_together = ["draft_issue", "assignee", "deleted_at"]
        constraints = [
            models.UniqueConstraint(
                fields=["draft_issue", "assignee"],
                condition=models.Q(deleted_at__isnull=True),
                name="draft_issue_assignee_unique_issue_assignee_when_deleted_at_null",
            )
        ]
        verbose_name = "Draft Issue Assignee"
        verbose_name_plural = "Draft Issue Assignees"
        db_table = "draft_issue_assignees"
        ordering = ("-created_at",)

    def __str__(self):
        return f"{self.draft_issue.name} {self.assignee.email}"


class DraftIssueLabel(WorkspaceBaseModel):
    draft_issue = models.ForeignKey(
        "db.DraftIssue",
        on_delete=models.CASCADE,
        related_name="draft_label_issue",
    )
    label = models.ForeignKey(
        "db.Label", on_delete=models.CASCADE, related_name="draft_label_issue"
    )

    class Meta:
        verbose_name = "Draft Issue Label"
        verbose_name_plural = "Draft Issue Labels"
        db_table = "draft_issue_labels"
        ordering = ("-created_at",)

    def __str__(self):
        return f"{self.draft_issue.name} {self.label.name}"


class DraftIssueModule(WorkspaceBaseModel):
    module = models.ForeignKey(
        "db.Module",
        on_delete=models.CASCADE,
        related_name="draft_issue_module",
    )
    draft_issue = models.ForeignKey(
        "db.DraftIssue",
        on_delete=models.CASCADE,
        related_name="draft_issue_module",
    )

    class Meta:
        unique_together = ["draft_issue", "module", "deleted_at"]
        constraints = [
            models.UniqueConstraint(
                fields=["draft_issue", "module"],
                condition=models.Q(deleted_at__isnull=True),
                name="module_draft_issue_unique_issue_module_when_deleted_at_null",
            )
        ]
        verbose_name = "Draft Issue Module"
        verbose_name_plural = "Draft Issue Modules"
        db_table = "draft_issue_modules"
        ordering = ("-created_at",)

    def __str__(self):
        return f"{self.module.name} {self.draft_issue.name}"


class DraftIssueCycle(WorkspaceBaseModel):
    """
    Draft Issue Cycles
    """

    draft_issue = models.OneToOneField(
        "db.DraftIssue",
        on_delete=models.CASCADE,
        related_name="draft_issue_cycle",
    )
    cycle = models.ForeignKey(
        "db.Cycle", on_delete=models.CASCADE, related_name="draft_issue_cycle"
    )

    class Meta:
        verbose_name = "Draft Issue Cycle"
        verbose_name_plural = "Draft Issue Cycles"
        db_table = "draft_issue_cycles"
        ordering = ("-created_at",)

    def __str__(self):
        return f"{self.cycle}"
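Note (not part of the diff): a hedged sketch of the draft data model above. `DraftIssueCycle.draft_issue` is a `OneToOneField`, so a draft can be attached to at most one cycle, while modules, labels, and assignees go through ordinary many-to-many link tables. The objects referenced below (`workspace`, `project`, `cycle`) are hypothetical.

# Sketch only; workspace/project/cycle objects are hypothetical.
draft = DraftIssue.objects.create(
    name="Draft created from the workspace view",
    workspace_id=workspace.id,
    project_id=project.id,
)
DraftIssueCycle.objects.create(
    draft_issue=draft, cycle=cycle, workspace_id=workspace.id
)
# A second DraftIssueCycle row for the same draft would violate the one-to-one relation.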
@@ -1,4 +1,5 @@
# Python imports
+import pytz
from uuid import uuid4

# Django imports
@@ -7,7 +8,7 @@ from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
from django.db.models import Q

-# Modeule imports
+# Module imports
from plane.db.mixins import AuditModel

# Module imports
@@ -119,6 +120,11 @@ class Project(BaseModel):
        related_name="default_state",
    )
    archived_at = models.DateTimeField(null=True)
+   # timezone
+   TIMEZONE_CHOICES = tuple(zip(pytz.all_timezones, pytz.all_timezones))
+   timezone = models.CharField(
+       max_length=255, default="UTC", choices=TIMEZONE_CHOICES
+   )

    def __str__(self):
        """Return name of the project"""
@@ -163,7 +163,7 @@ def burndown_plot(
    if queryset.end_date and queryset.start_date:
        # Get all dates between the two dates
        date_range = [
-           queryset.start_date + timedelta(days=x)
+           (queryset.start_date + timedelta(days=x)).date()
            for x in range(
                (queryset.end_date - queryset.start_date).days + 1
            )
@@ -203,7 +203,7 @@ def burndown_plot(
    if module_id:
        # Get all dates between the two dates
        date_range = [
-           queryset.start_date + timedelta(days=x)
+           (queryset.start_date + timedelta(days=x)).date()
            for x in range(
                (queryset.target_date - queryset.start_date).days + 1
            )
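Note (not part of the diff): here `.date()` is added rather than removed because `queryset.start_date` is now a datetime, so `start_date + timedelta(days=x)` yields datetimes; calling `.date()` keeps the burndown chart keyed by calendar dates as before. A minimal sketch with an assumed start value:

# Sketch only; the start value is hypothetical.
from datetime import timedelta
from django.utils import timezone

start = timezone.now()                             # DateTimeField value after the migration
old_style_key = start + timedelta(days=1)          # datetime, not a clean per-day chart key
new_style_key = (start + timedelta(days=1)).date() # plain date, e.g. 2024-10-01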