Compare commits

...

4 Commits

Author SHA1 Message Date
pablohashescobar c003c70492 dev: remove unused imports 2024-07-10 14:56:38 +05:30
pablohashescobar 8efdb34ffd dev: update migrations to run async 2024-07-10 14:50:19 +05:30
pablohashescobar 15a3aa0203 dev: revert data migrations 2024-07-10 12:03:08 +05:30
pablohashescobar 0ef75a6381 dev: remove issue type back migrations 2024-07-09 20:24:02 +05:30
5 changed files with 129 additions and 29 deletions

View File

View File

@@ -0,0 +1,63 @@
# Third party imports
from celery import shared_task
# Django imports
from django.utils import timezone
# Module imports
from plane.db.models import PageVersion, IssueType, Issue


@shared_task
def backfill_issue_type_task(projects):
    # Create the issue types for all projects
    IssueType.objects.bulk_create(
        [
            IssueType(
                name="Task",
                description="A task that needs to be completed.",
                project_id=project["id"],
                workspace_id=project["workspace_id"],
            )
            for project in projects
        ],
        batch_size=1000,
    )

    # Map each project to its newly created issue type
    issue_types = {
        str(issue_type["project_id"]): str(issue_type["id"])
        for issue_type in IssueType.objects.filter(
            project_id__in=[project["id"] for project in projects]
        ).values("id", "project_id")
    }

    # Collect the existing issues that need a type assigned
    bulk_issues = []
    for issue in Issue.objects.filter(
        project_id__in=[project["id"] for project in projects]
    ):
        issue.type_id = issue_types[str(issue.project_id)]
        bulk_issues.append(issue)

    # Update the issue type for all existing issues
    Issue.objects.bulk_update(bulk_issues, ["type_id"], batch_size=1000)


@shared_task
def backfill_page_versions_task(pages):
    # Create the page versions for all pages
    PageVersion.objects.bulk_create(
        [
            PageVersion(
                page_id=page["id"],
                workspace_id=page["workspace_id"],
                last_saved_at=timezone.now(),
                owned_by_id=page["owned_by_id"],
                description_binary=page["description_binary"],
                description_html=page["description_html"],
                description_stripped=page["description_stripped"],
            )
            for page in pages
        ],
        batch_size=1000,
    )
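
Both tasks receive lists of plain dictionaries (the output of .values()) rather than model instances, which keeps the arguments JSON-serializable for the Celery broker. A minimal sketch of the payload shape and the enqueueing call, mirroring what the migration below does; the placeholder values are illustrative, not taken from this change:

# Hypothetical chunk, shaped like Project.objects.values("id", "workspace_id"):
projects_chunk = [
    {
        "id": "<existing project UUID>",
        "workspace_id": "<that project's workspace UUID>",
    },
]

# Enqueue through the broker with a delay, as the migration does:
backfill_issue_type_task.apply_async((projects_chunk,), countdown=30)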

View File

@@ -4,38 +4,67 @@ from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid
from plane.db.backfills.backfill_0070_page_versions import (
    backfill_issue_type_task,
    backfill_page_versions_task,
)

CHUNK_SIZE = 100  # Number of records per batch
INITIAL_DELAY = 30  # Initial delay in seconds
INCREMENT_DELAY = 1  # Increment delay in seconds


def create_issue_types(apps, schema_editor):
def backfill_issue_types(apps, schema_editor):
    start = 0
    end = CHUNK_SIZE
    Project = apps.get_model("db", "Project")
    Issue = apps.get_model("db", "Issue")
    IssueType = apps.get_model("db", "IssueType")
    # Create the issue types for all projects
    IssueType.objects.bulk_create(
        [
            IssueType(
                name="Task",
                description="A task that needs to be completed.",
                project_id=project["id"],
                workspace_id=project["workspace_id"],
            )
            for project in Project.objects.values("id", "workspace_id")
        ],
        batch_size=1000,
    )
    # Update the issue type for all existing issues
    issue_types = {
        str(issue_type["project_id"]): str(issue_type["id"])
        for issue_type in IssueType.objects.values("id", "project_id")
    }
    # Update the issue type for all existing issues
    bulk_issues = []
    for issue in Issue.objects.all():
        issue.type_id = issue_types[str(issue.project_id)]
        bulk_issues.append(issue)
    # Update the issue type for all existing issues
    Issue.objects.bulk_update(bulk_issues, ["type_id"], batch_size=1000)
    total = Project.objects.count()
    delay_increment = INITIAL_DELAY
    while start < total:
        projects = list(
            Project.objects.values("id", "workspace_id")[start:end]
        )
        backfill_issue_type_task.apply_async(
            (projects,), countdown=delay_increment
        )
        delay_increment += (
            INCREMENT_DELAY  # Increment delay for the next batch
        )
        start += CHUNK_SIZE
        end += CHUNK_SIZE


def backfill_page_versions(apps, schema_editor):
    start = 0
    end = CHUNK_SIZE
    Page = apps.get_model("db", "Page")
    total = Page.objects.count()
    delay_increment = INITIAL_DELAY
    while start < total:
        pages = list(
            Page.objects.values(
                "id",
                "workspace_id",
                "owned_by_id",
                "description_binary",
                "description_html",
                "description_stripped",
            )[start:end]
        )
        backfill_page_versions_task.apply_async(
            (pages,), countdown=delay_increment
        )
        delay_increment += (
            INCREMENT_DELAY  # Increment delay for the next batch
        )
        start += CHUNK_SIZE
        end += CHUNK_SIZE


class Migration(migrations.Migration):
@@ -140,7 +169,6 @@ class Migration(migrations.Migration):
name="is_service",
field=models.BooleanField(default=False),
),
migrations.RunPython(create_issue_types),
migrations.CreateModel(
name="PageVersion",
fields=[
@@ -180,6 +208,10 @@ class Migration(migrations.Migration):
"description_stripped",
models.TextField(blank=True, null=True),
),
(
"description_json",
models.JSONField(blank=True, default=dict),
),
(
"created_by",
models.ForeignKey(
@@ -274,4 +306,6 @@ class Migration(migrations.Migration):
name="target_date",
field=models.DateTimeField(blank=True, null=True),
),
migrations.RunPython(backfill_issue_types),
migrations.RunPython(backfill_page_versions),
]
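
The backfill functions above only slice the querysets and enqueue Celery tasks, so the migration itself returns quickly; with the constants at the top of the file, batch i (zero-based) is scheduled INITIAL_DELAY + i * INCREMENT_DELAY seconds out. A small illustrative helper (not part of this change) that reproduces the schedule:

CHUNK_SIZE = 100
INITIAL_DELAY = 30   # seconds before the first batch runs
INCREMENT_DELAY = 1  # extra second added for each subsequent batch

def batch_schedule(total_rows):
    # Yield (offset, countdown) pairs the way backfill_issue_types enqueues them.
    start, countdown = 0, INITIAL_DELAY
    while start < total_rows:
        yield start, countdown
        countdown += INCREMENT_DELAY
        start += CHUNK_SIZE

# For 350 projects, four batches run 30-33 seconds after the migration dispatches them:
print(list(batch_schedule(350)))
# [(0, 30), (100, 31), (200, 32), (300, 33)]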

View File

@@ -281,6 +281,7 @@ class PageVersion(BaseModel):
    description_binary = models.BinaryField(null=True)
    description_html = models.TextField(blank=True, default="<p></p>")
    description_stripped = models.TextField(blank=True, null=True)
    description_json = models.JSONField(default=dict, blank=True)

    class Meta:
        verbose_name = "Page Version"

View File

@@ -276,6 +276,8 @@ CELERY_IMPORTS = (
"plane.bgtasks.api_logs_task",
# management tasks
"plane.bgtasks.dummy_data_task",
# backfill tasks
"plane.db.backfills.backfill_0070_page_versions",
)
# Sentry Settings
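
Listing the module in CELERY_IMPORTS is what makes the worker import it and register the two @shared_task functions; without it, the apply_async calls issued by the migration would queue messages no worker recognizes. A quick, illustrative check from a shell once the settings and tasks are loaded (task names shown are the expected defaults derived from the module path, not output captured from this change):

from celery import current_app

# Both backfill tasks should appear in the task registry, e.g.
# "plane.db.backfills.backfill_0070_page_versions.backfill_issue_type_task"
print([name for name in current_app.tasks if "backfill_0070_page_versions" in name])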