Mirror of https://github.com/makeplane/plane
Synced 2025-08-07 19:59:33 +00:00

Compare commits (9 commits): feat-home-… ... chore-issu…
| Author | SHA1 | Date |
|---|---|---|
|  | 954167c5ff |  |
|  | 4d12a94b46 |  |
|  | b0f20f59eb |  |
|  | b4d008d4bb |  |
|  | daf0d9d3a5 |  |
|  | 7cfb1f8ace |  |
|  | 3654f24f8e |  |
|  | 7f66604cc2 |  |
|  | d8cf08df91 |  |
120  apiserver/plane/bgtasks/issue_description_version_sync.py  (Normal file)
@@ -0,0 +1,120 @@
# Python imports
from typing import Optional

# Django imports
from django.utils import timezone
from django.db import transaction

# Third party imports
from celery import shared_task

# Module imports
from plane.db.models import Issue, IssueDescriptionVersion, ProjectMember
from plane.utils.exception_logger import log_exception


def get_owner_id(issue: Issue) -> Optional[int]:
    """Get the owner ID of the issue"""

    if issue.updated_by_id:
        return issue.updated_by_id

    if issue.created_by_id:
        return issue.created_by_id

    # Find project admin as fallback
    project_member = ProjectMember.objects.filter(
        project_id=issue.project_id,
        role=20,  # Admin role
    ).first()

    return project_member.member_id if project_member else None


@shared_task
def sync_issue_description_version(batch_size=5000, offset=0, countdown=300):
    """Task to create IssueDescriptionVersion records for existing Issues in batches"""
    try:
        with transaction.atomic():
            base_query = Issue.objects
            total_issues_count = base_query.count()

            if total_issues_count == 0:
                return

            # Calculate batch range
            end_offset = min(offset + batch_size, total_issues_count)

            # Fetch issues with related data
            issues_batch = (
                base_query.order_by("created_at")
                .select_related("workspace", "project")
                .only(
                    "id",
                    "workspace_id",
                    "project_id",
                    "created_by_id",
                    "updated_by_id",
                    "description_binary",
                    "description_html",
                    "description_stripped",
                    "description",
                )[offset:end_offset]
            )

            if not issues_batch:
                return

            version_objects = []
            for issue in issues_batch:
                # Validate required fields
                if not issue.workspace_id or not issue.project_id:
                    print(f"Skipping {issue.id} - missing workspace_id or project_id")
                    continue

                # Determine owned_by_id
                owned_by_id = get_owner_id(issue)
                if owned_by_id is None:
                    print(f"Skipping issue {issue.id} - missing owned_by")
                    continue

                # Create version object
                version_objects.append(
                    IssueDescriptionVersion(
                        workspace_id=issue.workspace_id,
                        project_id=issue.project_id,
                        created_by_id=issue.created_by_id,
                        updated_by_id=issue.updated_by_id,
                        owned_by_id=owned_by_id,
                        last_saved_at=timezone.now(),
                        issue_id=issue.id,
                        description_binary=issue.description_binary,
                        description_html=issue.description_html,
                        description_stripped=issue.description_stripped,
                        description_json=issue.description,
                    )
                )

            # Bulk create version objects
            if version_objects:
                IssueDescriptionVersion.objects.bulk_create(version_objects)

            # Schedule next batch if needed
            if end_offset < total_issues_count:
                sync_issue_description_version.apply_async(
                    kwargs={
                        "batch_size": batch_size,
                        "offset": end_offset,
                        "countdown": countdown,
                    },
                    countdown=countdown,
                )
            return
    except Exception as e:
        log_exception(e)
        return


@shared_task
def schedule_issue_description_version(batch_size=5000, countdown=300):
    sync_issue_description_version.delay(batch_size=batch_size, countdown=countdown)
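The backfill above walks the Issue table in fixed-size slices and re-enqueues itself with the next offset until every issue has a description version. A minimal, hedged sketch of starting it from a Django shell, assuming a Celery worker is consuming the default queue (the batch size and countdown below are illustrative, not project defaults):

# Illustrative invocation from `python manage.py shell`; values are examples only.
from plane.bgtasks.issue_description_version_sync import (
    schedule_issue_description_version,
)

# Copy 1,000 issues per batch, waiting 60 seconds between batches.
schedule_issue_description_version.delay(batch_size=1000, countdown=60)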
85  apiserver/plane/bgtasks/issue_description_version_task.py  (Normal file)
@@ -0,0 +1,85 @@
from celery import shared_task
from django.db import transaction
from django.utils import timezone
from typing import Optional, Dict, Any
import json

from plane.db.models import Issue, IssueDescriptionVersion
from plane.utils.exception_logger import log_exception


def should_update_existing_version(
    version: Optional[IssueDescriptionVersion],
    user_id: str,
    max_time_difference: int = 600,
) -> bool:
    if not version:
        return False

    time_difference = (timezone.now() - version.last_saved_at).total_seconds()
    return (
        str(version.owned_by_id) == str(user_id)
        and time_difference <= max_time_difference
    )


def update_existing_version(
    version: IssueDescriptionVersion, description_data: Dict[str, Any]
) -> None:
    version.description_json = description_data.get("description")
    version.description_html = description_data.get("description_html")
    version.description_binary = description_data.get("description_binary")
    version.description_stripped = description_data.get("description_stripped")
    version.last_saved_at = timezone.now()

    version.save(
        update_fields=[
            "description_json",
            "description_html",
            "description_binary",
            "description_stripped",
            "last_saved_at",
        ]
    )


@shared_task
def issue_description_version_task(
    updated_issue: Optional[str], issue_id: str, user_id: str
) -> Optional[bool]:
    try:
        # Parse updated issue data
        current_issue: Dict = json.loads(updated_issue) if updated_issue else {}

        # Get current issue
        issue = Issue.objects.get(id=issue_id)

        # Check if description has changed
        if current_issue.get("description_html") == issue.description_html:
            return

        with transaction.atomic():
            # Get latest version
            latest_version = (
                IssueDescriptionVersion.objects.filter(issue_id=issue_id)
                .order_by("-last_saved_at")
                .first()
            )

            # Determine whether to update existing or create new version
            if should_update_existing_version(latest_version, user_id):
                update_existing_version(latest_version, current_issue)
            else:
                IssueDescriptionVersion.log_issue_description_version(issue, user_id)

        return

    except Issue.DoesNotExist:
        # Issue no longer exists, skip processing
        return
    except json.JSONDecodeError as e:
        log_exception(f"Invalid JSON for updated_issue: {e}")
        return
    except Exception as e:
        log_exception(f"Error processing issue description version: {e}")
        return
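should_update_existing_version coalesces rapid edits: the latest version row is updated in place only when it belongs to the same user and was saved within max_time_difference seconds (600 by default); otherwise a new row is written. A hedged illustration of that decision using stand-in objects instead of real model instances (run from a Django shell so settings are loaded):

# Illustration only; SimpleNamespace stands in for an IssueDescriptionVersion row.
from datetime import timedelta
from types import SimpleNamespace
from django.utils import timezone

recent = SimpleNamespace(
    owned_by_id="user-1",
    last_saved_at=timezone.now() - timedelta(minutes=5),
)
stale = SimpleNamespace(
    owned_by_id="user-1",
    last_saved_at=timezone.now() - timedelta(minutes=20),
)

should_update_existing_version(recent, "user-1")  # True: same author, inside the window
should_update_existing_version(stale, "user-1")   # False: window elapsed, new row is created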
255  apiserver/plane/bgtasks/issue_version_sync.py  (Normal file)
@@ -0,0 +1,255 @@
# Python imports
import json
from typing import Optional, List, Dict
from uuid import UUID
from itertools import groupby

# Django imports
from django.utils import timezone
from django.db import transaction

# Third party imports
from celery import shared_task

# Module imports
from plane.db.models import (
    Issue,
    IssueVersion,
    ProjectMember,
    CycleIssue,
    ModuleIssue,
    IssueActivity,
    IssueAssignee,
    IssueLabel,
)
from plane.utils.exception_logger import log_exception


@shared_task
def issue_task(updated_issue, issue_id, user_id):
    try:
        current_issue = json.loads(updated_issue) if updated_issue else {}
        issue = Issue.objects.get(id=issue_id)

        updated_current_issue = {}
        for key, value in current_issue.items():
            if getattr(issue, key) != value:
                updated_current_issue[key] = value

        if updated_current_issue:
            issue_version = (
                IssueVersion.objects.filter(issue_id=issue_id)
                .order_by("-last_saved_at")
                .first()
            )

            if (
                issue_version
                and str(issue_version.owned_by) == str(user_id)
                and (timezone.now() - issue_version.last_saved_at).total_seconds()
                <= 600
            ):
                for key, value in updated_current_issue.items():
                    setattr(issue_version, key, value)
                issue_version.last_saved_at = timezone.now()
                issue_version.save(
                    update_fields=list(updated_current_issue.keys()) + ["last_saved_at"]
                )
            else:
                IssueVersion.log_issue_version(issue, user_id)

        return
    except Issue.DoesNotExist:
        return
    except Exception as e:
        log_exception(e)
        return


def get_owner_id(issue: Issue) -> Optional[int]:
    """Get the owner ID of the issue"""

    if issue.updated_by_id:
        return issue.updated_by_id

    if issue.created_by_id:
        return issue.created_by_id

    # Find project admin as fallback
    project_member = ProjectMember.objects.filter(
        project_id=issue.project_id,
        role=20,  # Admin role
    ).first()

    return project_member.member_id if project_member else None


def get_related_data(issue_ids: List[UUID]) -> Dict:
    """Get related data for the given issue IDs"""

    cycle_issues = {
        ci.issue_id: ci.cycle_id
        for ci in CycleIssue.objects.filter(issue_id__in=issue_ids)
    }

    # Get assignees with proper grouping
    assignee_records = list(
        IssueAssignee.objects.filter(issue_id__in=issue_ids)
        .values_list("issue_id", "assignee_id")
        .order_by("issue_id")
    )
    assignees = {}
    for issue_id, group in groupby(assignee_records, key=lambda x: x[0]):
        assignees[issue_id] = [str(g[1]) for g in group]

    # Get labels with proper grouping
    label_records = list(
        IssueLabel.objects.filter(issue_id__in=issue_ids)
        .values_list("issue_id", "label_id")
        .order_by("issue_id")
    )
    labels = {}
    for issue_id, group in groupby(label_records, key=lambda x: x[0]):
        labels[issue_id] = [str(g[1]) for g in group]

    # Get modules with proper grouping
    module_records = list(
        ModuleIssue.objects.filter(issue_id__in=issue_ids)
        .values_list("issue_id", "module_id")
        .order_by("issue_id")
    )
    modules = {}
    for issue_id, group in groupby(module_records, key=lambda x: x[0]):
        modules[issue_id] = [str(g[1]) for g in group]

    # Get latest activities
    latest_activities = {}
    activities = IssueActivity.objects.filter(issue_id__in=issue_ids).order_by(
        "issue_id", "-created_at"
    )
    for issue_id, activities_group in groupby(activities, key=lambda x: x.issue_id):
        first_activity = next(activities_group, None)
        if first_activity:
            latest_activities[issue_id] = first_activity.id

    return {
        "cycle_issues": cycle_issues,
        "assignees": assignees,
        "labels": labels,
        "modules": modules,
        "activities": latest_activities,
    }


def create_issue_version(issue: Issue, related_data: Dict) -> Optional[IssueVersion]:
    """Create IssueVersion object from the given issue and related data"""

    try:
        if not issue.workspace_id or not issue.project_id:
            print(f"Skipping issue {issue.id} - missing workspace_id or project_id")
            return None

        owned_by_id = get_owner_id(issue)
        if owned_by_id is None:
            print(f"Skipping issue {issue.id} - missing owned_by")
            return None

        return IssueVersion(
            workspace_id=issue.workspace_id,
            project_id=issue.project_id,
            created_by_id=issue.created_by_id,
            updated_by_id=issue.updated_by_id,
            owned_by_id=owned_by_id,
            last_saved_at=timezone.now(),
            activity_id=related_data["activities"].get(issue.id),
            properties=getattr(issue, "properties", {}),
            meta=getattr(issue, "meta", {}),
            issue_id=issue.id,
            parent=issue.parent_id,
            state=issue.state_id,
            point=issue.point,
            estimate_point=issue.estimate_point_id,
            name=issue.name,
            priority=issue.priority,
            start_date=issue.start_date,
            target_date=issue.target_date,
            assignees=related_data["assignees"].get(issue.id, []),
            sequence_id=issue.sequence_id,
            labels=related_data["labels"].get(issue.id, []),
            sort_order=issue.sort_order,
            completed_at=issue.completed_at,
            archived_at=issue.archived_at,
            is_draft=issue.is_draft,
            external_source=issue.external_source,
            external_id=issue.external_id,
            type=issue.type_id,
            cycle=related_data["cycle_issues"].get(issue.id),
            modules=related_data["modules"].get(issue.id, []),
        )
    except Exception as e:
        log_exception(e)
        return None


@shared_task
def sync_issue_version(batch_size=5000, offset=0, countdown=300):
    """Task to create IssueVersion records for existing Issues in batches"""

    try:
        with transaction.atomic():
            base_query = Issue.objects
            total_issues_count = base_query.count()

            if total_issues_count == 0:
                return

            print(f"Offset: {offset}")
            print(f"Total Issues: {total_issues_count}")

            end_offset = min(offset + batch_size, total_issues_count)

            # Get issues batch with optimized queries
            issues_batch = list(
                base_query.order_by("created_at")
                .select_related("workspace", "project")
                .all()[offset:end_offset]
            )

            if not issues_batch:
                return

            # Get all related data in bulk
            issue_ids = [issue.id for issue in issues_batch]
            related_data = get_related_data(issue_ids)

            issue_versions = []
            for issue in issues_batch:
                version = create_issue_version(issue, related_data)
                if version:
                    issue_versions.append(version)

            # Bulk create versions
            if issue_versions:
                IssueVersion.objects.bulk_create(issue_versions, batch_size=1000)

            # Schedule the next batch if there are more issues to process
            if end_offset < total_issues_count:
                sync_issue_version.apply_async(
                    kwargs={
                        "batch_size": batch_size,
                        "offset": end_offset,
                        "countdown": countdown,
                    },
                    countdown=countdown,
                )

            print(f"Processed Issues: {end_offset}")
            return
    except Exception as e:
        log_exception(e)
        return


@shared_task
def schedule_issue_version(batch_size=5000, countdown=300):
    sync_issue_version.delay(batch_size=batch_size, countdown=countdown)
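get_related_data relies on each queryset being ordered by issue_id so that itertools.groupby yields one group per issue; the result is a set of plain lookup dictionaries keyed by issue id. Roughly, for a single hypothetical issue, the returned value looks like this (identifiers are made up for illustration; real keys are issue UUIDs):

# Hypothetical shape of get_related_data([...]).
{
    "cycle_issues": {"issue-1": "cycle-9"},          # issue id -> cycle id
    "assignees": {"issue-1": ["user-3", "user-7"]},  # issue id -> assignee ids
    "labels": {"issue-1": ["label-4"]},              # issue id -> label ids
    "modules": {"issue-1": ["module-2"]},            # issue id -> module ids
    "activities": {"issue-1": "activity-5"},         # issue id -> latest activity id
}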
90  apiserver/plane/bgtasks/issue_version_task.py  (Normal file)
@@ -0,0 +1,90 @@
# Python imports
import json
from typing import Dict, Any, Optional, List

# Django imports
from django.utils import timezone
from django.db import transaction

# Third party imports
from celery import shared_task

# Module imports
from plane.db.models import Issue, IssueVersion
from plane.utils.exception_logger import log_exception


def get_changed_fields(current_issue: Dict[str, Any], issue: Issue) -> Dict[str, Any]:
    return {
        key: value
        for key, value in current_issue.items()
        if getattr(issue, key) != value
    }


def should_update_existing_version(
    version: Optional[IssueVersion], user_id: str, max_time_difference: int = 600
) -> bool:
    if not version:
        return False

    time_difference = (timezone.now() - version.last_saved_at).total_seconds()
    return (
        str(version.owned_by_id) == str(user_id)
        and time_difference <= max_time_difference
    )


def update_version_fields(
    version: IssueVersion, changed_fields: Dict[str, Any]
) -> List[str]:
    for key, value in changed_fields.items():
        setattr(version, key, value)

    version.last_saved_at = timezone.now()
    update_fields = list(changed_fields.keys()) + ["last_saved_at"]
    return update_fields


@shared_task
def issue_version_task(
    updated_issue: Optional[str], issue_id: str, user_id: str
) -> Optional[bool]:
    try:
        # Parse updated issue data
        current_issue: Dict = json.loads(updated_issue) if updated_issue else {}

        with transaction.atomic():
            # Get current issue
            issue = Issue.objects.get(id=issue_id)

            # Get changed fields
            changed_fields = get_changed_fields(current_issue, issue)

            if not changed_fields:
                return True

            # Get latest version
            latest_version = (
                IssueVersion.objects.filter(issue_id=issue_id)
                .order_by("-last_saved_at")
                .first()
            )

            # Update existing or create new version
            if should_update_existing_version(latest_version, user_id):
                update_fields = update_version_fields(latest_version, changed_fields)
                latest_version.save(update_fields=update_fields)
            else:
                IssueVersion.log_issue_version(issue, user_id)

        return True

    except Issue.DoesNotExist:
        return None
    except json.JSONDecodeError as e:
        log_exception(f"Invalid JSON for updated_issue: {e}")
        return False
    except Exception as e:
        log_exception(f"Error processing issue version: {e}")
        return False
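get_changed_fields simply diffs the serialized payload against the live model instance, so only keys whose values actually changed are copied onto the version row. A hedged, self-contained illustration with a stand-in object instead of a real Issue:

# Illustration only; SimpleNamespace stands in for an Issue instance.
from types import SimpleNamespace

issue = SimpleNamespace(name="Old name", priority="high", sort_order=65535)
payload = {"name": "New name", "priority": "high"}

get_changed_fields(payload, issue)
# -> {"name": "New name"}  (priority matches the instance, so it is dropped)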
@@ -0,0 +1,23 @@
# Django imports
from django.core.management.base import BaseCommand

# Module imports
from plane.bgtasks.issue_description_version_sync import (
    schedule_issue_description_version,
)


class Command(BaseCommand):
    help = "Creates IssueDescriptionVersion records for existing Issues in batches"

    def handle(self, *args, **options):
        batch_size = input("Enter the batch size: ")
        batch_countdown = input("Enter the batch countdown: ")

        schedule_issue_description_version.delay(
            batch_size=int(batch_size), countdown=int(batch_countdown)
        )

        self.stdout.write(
            self.style.SUCCESS("Successfully created issue description version task")
        )
19  apiserver/plane/db/management/commands/sync_issue_version.py  (Normal file)
@@ -0,0 +1,19 @@
# Django imports
from django.core.management.base import BaseCommand

# Module imports
from plane.bgtasks.issue_version_sync import schedule_issue_version


class Command(BaseCommand):
    help = "Creates IssueVersion records for existing Issues in batches"

    def handle(self, *args, **options):
        batch_size = input("Enter the batch size: ")
        batch_countdown = input("Enter the batch countdown: ")

        schedule_issue_version.delay(
            batch_size=int(batch_size), countdown=int(batch_countdown)
        )

        self.stdout.write(self.style.SUCCESS("Successfully created issue version task"))
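Both management commands prompt for a batch size and a countdown and then enqueue the Celery task, so a worker must be running for the backfill to progress. One hedged way to drive sync_issue_version non-interactively from a script is to feed the two input() prompts through stdin (this wrapper is illustrative and not part of the repository):

# Hypothetical helper; answers the command's two input() prompts via stdin.
import sys
from io import StringIO
from django.core.management import call_command

sys.stdin = StringIO("1000\n60\n")  # batch size, then countdown between batches
call_command("sync_issue_version")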
@@ -0,0 +1,90 @@
# Generated by Django 4.2.16 on 2024-12-09 10:03

from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import plane.db.models.user
import uuid


class Migration(migrations.Migration):

    dependencies = [
        ('db', '0086_issueversion_alter_teampage_unique_together_and_more'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='issueversion',
            name='description',
        ),
        migrations.RemoveField(
            model_name='issueversion',
            name='description_binary',
        ),
        migrations.RemoveField(
            model_name='issueversion',
            name='description_html',
        ),
        migrations.RemoveField(
            model_name='issueversion',
            name='description_stripped',
        ),
        migrations.AddField(
            model_name='issueversion',
            name='activity',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='versions', to='db.issueactivity'),
        ),
        migrations.AddField(
            model_name='issueversion',
            name='point',
            field=models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(12)]),
        ),
        migrations.AddField(
            model_name='profile',
            name='is_mobile_onboarded',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='profile',
            name='mobile_onboarding_step',
            field=models.JSONField(default=plane.db.models.user.get_mobile_default_onboarding),
        ),
        migrations.AddField(
            model_name='profile',
            name='mobile_timezone_auto_set',
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name='issueversion',
            name='owned_by',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_versions', to=settings.AUTH_USER_MODEL),
        ),
        migrations.CreateModel(
            name='IssueDescriptionVersion',
            fields=[
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
                ('deleted_at', models.DateTimeField(blank=True, null=True, verbose_name='Deleted At')),
                ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
                ('description_binary', models.BinaryField(null=True)),
                ('description_html', models.TextField(blank=True, default='<p></p>')),
                ('description_stripped', models.TextField(blank=True, null=True)),
                ('description_json', models.JSONField(blank=True, default=dict)),
                ('last_saved_at', models.DateTimeField(default=django.utils.timezone.now)),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
                ('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='description_versions', to='db.issue')),
                ('owned_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_description_versions', to=settings.AUTH_USER_MODEL)),
                ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project')),
                ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
                ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace')),
            ],
            options={
                'verbose_name': 'Issue Description Version',
                'verbose_name_plural': 'Issue Description Versions',
                'db_table': 'issue_description_versions',
            },
        ),
    ]
@@ -41,6 +41,8 @@ from .issue import (
    IssueSequence,
    IssueSubscriber,
    IssueVote,
    IssueVersion,
    IssueDescriptionVersion,
)
from .module import Module, ModuleIssue, ModuleLink, ModuleMember, ModuleUserProperties
from .notification import EmailNotificationLog, Notification, UserNotificationPreference
@@ -660,9 +660,6 @@ class IssueVote(ProjectBaseModel):

class IssueVersion(ProjectBaseModel):
    issue = models.ForeignKey(
        "db.Issue", on_delete=models.CASCADE, related_name="versions"
    )
    PRIORITY_CHOICES = (
        ("urgent", "Urgent"),
        ("high", "High"),
@@ -670,14 +667,17 @@ class IssueVersion(ProjectBaseModel):
        ("low", "Low"),
        ("none", "None"),
    )

    issue = models.ForeignKey(
        "db.Issue", on_delete=models.CASCADE, related_name="versions"
    )
    parent = models.UUIDField(blank=True, null=True)
    state = models.UUIDField(blank=True, null=True)
    point = models.IntegerField(
        validators=[MinValueValidator(0), MaxValueValidator(12)], null=True, blank=True
    )
    estimate_point = models.UUIDField(blank=True, null=True)
    name = models.CharField(max_length=255, verbose_name="Issue Name")
    description = models.JSONField(blank=True, default=dict)
    description_html = models.TextField(blank=True, default="<p></p>")
    description_stripped = models.TextField(blank=True, null=True)
    description_binary = models.BinaryField(null=True)
    priority = models.CharField(
        max_length=30,
        choices=PRIORITY_CHOICES,
@@ -686,7 +686,9 @@ class IssueVersion(ProjectBaseModel):
    )
    start_date = models.DateField(null=True, blank=True)
    target_date = models.DateField(null=True, blank=True)
    assignees = ArrayField(models.UUIDField(), blank=True, default=list)
    sequence_id = models.IntegerField(default=1, verbose_name="Issue Sequence ID")
    labels = ArrayField(models.UUIDField(), blank=True, default=list)
    sort_order = models.FloatField(default=65535)
    completed_at = models.DateTimeField(null=True)
    archived_at = models.DateField(null=True)
@@ -694,14 +696,22 @@ class IssueVersion(ProjectBaseModel):
    external_source = models.CharField(max_length=255, null=True, blank=True)
    external_id = models.CharField(max_length=255, blank=True, null=True)
    type = models.UUIDField(blank=True, null=True)
    last_saved_at = models.DateTimeField(default=timezone.now)
    owned_by = models.UUIDField()
    assignees = ArrayField(models.UUIDField(), blank=True, default=list)
    labels = ArrayField(models.UUIDField(), blank=True, default=list)
    cycle = models.UUIDField(null=True, blank=True)
    modules = ArrayField(models.UUIDField(), blank=True, default=list)
    properties = models.JSONField(default=dict)
    meta = models.JSONField(default=dict)
    activity = models.ForeignKey(
        "db.IssueActivity",
        on_delete=models.SET_NULL,
        null=True,
        related_name="versions",
    )
    properties = models.JSONField(default=dict)  # issue properties
    meta = models.JSONField(default=dict)  # issue meta
    last_saved_at = models.DateTimeField(default=timezone.now)
    owned_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name="issue_versions",
    )

    class Meta:
        verbose_name = "Issue Version"
@@ -721,36 +731,87 @@ class IssueVersion(ProjectBaseModel):

            Module = apps.get_model("db.Module")
            CycleIssue = apps.get_model("db.CycleIssue")
            IssueAssignee = apps.get_model("db.IssueAssignee")
            IssueLabel = apps.get_model("db.IssueLabel")

            cycle_issue = CycleIssue.objects.filter(issue=issue).first()

            cls.objects.create(
                issue=issue,
                parent=issue.parent,
                state=issue.state,
                parent=issue.parent_id,
                state=issue.state_id,
                point=issue.point,
                estimate_point=issue.estimate_point,
                estimate_point=issue.estimate_point_id,
                name=issue.name,
                description=issue.description,
                description_html=issue.description_html,
                description_stripped=issue.description_stripped,
                description_binary=issue.description_binary,
                priority=issue.priority,
                start_date=issue.start_date,
                target_date=issue.target_date,
                assignees=list(
                    IssueAssignee.objects.filter(issue=issue).values_list(
                        "assignee_id", flat=True
                    )
                ),
                sequence_id=issue.sequence_id,
                labels=list(
                    IssueLabel.objects.filter(issue=issue).values_list(
                        "label_id", flat=True
                    )
                ),
                sort_order=issue.sort_order,
                completed_at=issue.completed_at,
                archived_at=issue.archived_at,
                is_draft=issue.is_draft,
                external_source=issue.external_source,
                external_id=issue.external_id,
                type=issue.type,
                last_saved_at=issue.last_saved_at,
                assignees=issue.assignees,
                labels=issue.labels,
                cycle=cycle_issue.cycle if cycle_issue else None,
                modules=Module.objects.filter(issue=issue).values_list("id", flat=True),
                type=issue.type_id,
                cycle=cycle_issue.cycle_id if cycle_issue else None,
                modules=list(
                    Module.objects.filter(issue=issue).values_list("id", flat=True)
                ),
                properties={},
                meta={},
                last_saved_at=timezone.now(),
                owned_by=user,
            )
            return True
        except Exception as e:
            log_exception(e)
            return False


class IssueDescriptionVersion(ProjectBaseModel):
    issue = models.ForeignKey(
        "db.Issue", on_delete=models.CASCADE, related_name="description_versions"
    )
    description_binary = models.BinaryField(null=True)
    description_html = models.TextField(blank=True, default="<p></p>")
    description_stripped = models.TextField(blank=True, null=True)
    description_json = models.JSONField(default=dict, blank=True)
    last_saved_at = models.DateTimeField(default=timezone.now)
    owned_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name="issue_description_versions",
    )

    class Meta:
        verbose_name = "Issue Description Version"
        verbose_name_plural = "Issue Description Versions"
        db_table = "issue_description_versions"

    @classmethod
    def log_issue_description_version(cls, issue, user):
        try:
            """
            Log the issue description version
            """
            cls.objects.create(
                issue=issue,
                description_binary=issue.description_binary,
                description_html=issue.description_html,
                description_stripped=issue.description_stripped,
                description_json=issue.description,
                last_saved_at=timezone.now(),
                owned_by=user,
            )
            return True
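log_issue_description_version snapshots whatever is currently stored on the issue row, so callers only need the issue instance and the acting user. A hedged sketch of taking a snapshot by hand; the primary keys are placeholders, and the User import path is assumed to match the other plane.db.models imports in this diff:

# Run from a Django shell; primary keys below are placeholders.
from plane.db.models import Issue, IssueDescriptionVersion, User

issue = Issue.objects.get(pk="<issue-uuid>")
user = User.objects.get(pk="<user-uuid>")

# Copies description_binary/_html/_stripped and the JSON description onto a new version row.
IssueDescriptionVersion.log_issue_description_version(issue, user)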
@@ -26,6 +26,14 @@ def get_default_onboarding():
    }


def get_mobile_default_onboarding():
    return {
        "profile_complete": False,
        "workspace_create": False,
        "workspace_join": False,
    }


class User(AbstractBaseUser, PermissionsMixin):
    id = models.UUIDField(
        default=uuid.uuid4, unique=True, editable=False, db_index=True, primary_key=True
@@ -178,6 +186,10 @@ class Profile(TimeAuditModel):
    billing_address = models.JSONField(null=True)
    has_billing_address = models.BooleanField(default=False)
    company_name = models.CharField(max_length=255, blank=True)
    # mobile
    is_mobile_onboarded = models.BooleanField(default=False)
    mobile_onboarding_step = models.JSONField(default=get_mobile_default_onboarding)
    mobile_timezone_auto_set = models.BooleanField(default=False)

    class Meta:
        verbose_name = "Profile"
@@ -262,6 +262,9 @@ CELERY_IMPORTS = (
    "plane.license.bgtasks.tracer",
    # management tasks
    "plane.bgtasks.dummy_data_task",
    # issue sync tasks
    "plane.bgtasks.issue_version_sync",
    "plane.bgtasks.issue_description_version_sync",
)

# Sentry Settings